Merge remote-tracking branch 'upstream/develop' into enhancement/mitre-ui-review-fixes

This commit is contained in:
VakarisZ 2020-03-23 12:54:32 +02:00
commit 372e05eb13
23 changed files with 499 additions and 59 deletions

View File

@ -0,0 +1,8 @@
from abc import ABCMeta, abstractmethod
class Analyzer(metaclass=ABCMeta):
    """
    Abstract base class for blackbox-test analyzers.

    Subclasses inspect the state of a test run and report whether it passed.
    The redundant ``object`` base was dropped: in Python 3 ``metaclass=ABCMeta``
    alone is the idiomatic way to declare an abstract base class.
    """

    @abstractmethod
    def analyze_test_results(self):
        """Return a truthy value if the test results are acceptable.

        :raises NotImplementedError: if a subclass fails to override this.
        """
        raise NotImplementedError()

View File

@ -1,7 +1,8 @@
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.analyzers.analyzer_log import AnalyzerLog
class CommunicationAnalyzer(object):
class CommunicationAnalyzer(Analyzer):
def __init__(self, island_client, machine_ips):
self.island_client = island_client

View File

@ -0,0 +1,59 @@
import logging
from datetime import timedelta
from envs.monkey_zoo.blackbox.analyzers.analyzer import Analyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
# Response-time budget for any single report page request.
MAX_ALLOWED_SINGLE_PAGE_TIME = timedelta(seconds=2)
# Response-time budget for all report pages combined.
MAX_ALLOWED_TOTAL_TIME = timedelta(seconds=5)

# Island report endpoints whose GET latency is measured by PerformanceAnalyzer.
REPORT_URLS = [
    "api/report/security",
    "api/attack/report",
    "api/report/zero_trust/findings",
    "api/report/zero_trust/principles",
    "api/report/zero_trust/pillars"
]

logger = logging.getLogger(__name__)
class PerformanceAnalyzer(Analyzer):
    """
    Checks that the island's report pages are generated fast enough.

    Each URL in REPORT_URLS must respond within MAX_ALLOWED_SINGLE_PAGE_TIME,
    and all of them together within MAX_ALLOWED_TOTAL_TIME.
    """

    def __init__(self, island_client: MonkeyIslandClient, break_if_took_too_long=False):
        """
        :param island_client: Client used to query the island's report endpoints.
        :param break_if_took_too_long: If True, call breakpoint() when timing budgets
                                       are exceeded so the island can be inspected.
        """
        self.break_if_took_too_long = break_if_took_too_long
        self.island_client = island_client

    def analyze_test_results(self) -> bool:
        """
        Time a GET request to every report page and check the timing budgets.

        :raises RuntimeError: if any monkey is still alive (timings would be skewed).
        :return: True if every single page and the total are within their limits.
        """
        if not self.island_client.is_all_monkeys_dead():
            raise RuntimeError("Can't test report times since not all Monkeys have died.")

        # Collect timings for all pages.
        # Clear caches first so we measure actual report generation, not cache hits.
        self.island_client.clear_caches()
        report_resource_to_response_time = {}
        for url in REPORT_URLS:
            report_resource_to_response_time[url] = self.island_client.get_elapsed_for_get_request(url)

        # Calculate total time and check each page.
        # (Variables renamed from "*_less_then_max": fixed then/than typo.)
        single_page_time_less_than_max = True
        total_time = timedelta()
        for page, elapsed in report_resource_to_response_time.items():
            logger.info(f"page {page} took {str(elapsed)}")
            total_time += elapsed
            if elapsed > MAX_ALLOWED_SINGLE_PAGE_TIME:
                single_page_time_less_than_max = False

        total_time_less_than_max = total_time < MAX_ALLOWED_TOTAL_TIME
        logger.info(f"total time is {str(total_time)}")

        performance_is_good_enough = total_time_less_than_max and single_page_time_less_than_max

        if self.break_if_took_too_long and not performance_is_good_enough:
            logger.warning(
                "Calling breakpoint - pausing to enable investigation of island. Type 'c' to continue once you're done "
                "investigating. Type 'p timings' and 'p total_time' to see performance information."
            )
            breakpoint()

        return performance_is_good_enough

View File

@ -1,3 +1,4 @@
from datetime import timedelta
from time import sleep
import json
@ -85,3 +86,23 @@ class MonkeyIslandClient(object):
def is_all_monkeys_dead(self):
    # No monkey document with dead=False means every monkey has finished running.
    alive_monkeys = self.find_monkeys_in_db({'dead': False})
    return len(alive_monkeys) == 0
def clear_caches(self):
    """
    Ask the island to clear its report caches.

    :raises: the HTTP error (by error code) if the request failed
    :return: The response
    """
    resp = self.requests.get("api/test/clear_caches")
    resp.raise_for_status()
    return resp
def get_elapsed_for_get_request(self, url):
    """
    Measure how long a GET request to the given island URL took.

    :param url: Island-relative URL to request.
    :return: The response's elapsed time as a timedelta; timedelta.max for
             failed requests so they always exceed any timing budget.
    """
    response = self.requests.get(url)
    if response.ok:
        LOGGER.debug(f"Got ok for {url} content peek:\n{response.content[:120].strip()}")
        return response.elapsed
    else:
        LOGGER.error(f"Trying to get {url} but got unexpected {str(response)}")
        # instead of raising for status, mark failed responses as maxtime
        # Bugfix: timedelta.max is a class attribute, not a callable —
        # the original `timedelta.max()` raised TypeError.
        return timedelta.max

View File

@ -61,6 +61,14 @@ class MonkeyIslandRequests(object):
headers=self.get_jwt_header(),
verify=False)
@_Decorators.refresh_jwt_token
def delete(self, url):
    """Send an authenticated DELETE request to the island at the given relative URL."""
    return requests.delete(  # noqa: DOU123
        self.addr + url,
        headers=self.get_jwt_header(),
        # verify=False: the island's TLS certificate is not validated.
        verify=False
    )
@_Decorators.refresh_jwt_token
def get_jwt_header(self):
    """Return the Authorization header carrying the current JWT token."""
    return {"Authorization": "JWT " + self.token}

View File

@ -0,0 +1,186 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"12345678",
"^NgDvY59~8"
],
"exploit_user_list": [
"Administrator",
"m0nk3y",
"user"
]
},
"general": {
"should_exploit": true
}
},
"basic_network": {
"general": {
"blocked_ips": [],
"depth": 2,
"local_network_scan": false,
"subnet_scan_list": [
"10.2.2.2",
"10.2.2.4"
]
},
"network_analysis": {
"inaccessible_subnets": []
}
},
"cnc": {
"servers": {
"command_servers": [
"10.2.2.251:5000"
],
"current_server": "10.2.2.251:5000",
"internet_services": [
"monkey.guardicore.com",
"www.google.com"
]
}
},
"exploits": {
"general": {
"exploiter_classes": [
"SSHExploiter",
"MSSQLExploiter",
"ElasticGroovyExploiter",
"HadoopExploiter"
],
"skip_exploit_if_file_exist": false
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
"remote_user_pass": "Password1!",
"user_to_add": "Monkey_IUSER_SUPPORT"
},
"rdp_grinder": {
"rdp_use_vbs_download": true
},
"sambacry": {
"sambacry_folder_paths_to_guess": [
"/",
"/mnt",
"/tmp",
"/storage",
"/export",
"/share",
"/shares",
"/home"
],
"sambacry_shares_not_to_check": [
"IPC$",
"print$"
],
"sambacry_trigger_timeout": 5
},
"smb_service": {
"smb_download_timeout": 300,
"smb_service_name": "InfectionMonkey"
}
},
"internal": {
"classes": {
"finger_classes": [
"SMBFinger",
"SSHFinger",
"PingScanner",
"HTTPFinger",
"MySQLFinger",
"MSSQLFinger",
"ElasticFinger"
]
},
"dropper": {
"dropper_date_reference_path_linux": "/bin/sh",
"dropper_date_reference_path_windows": "%windir%\\system32\\kernel32.dll",
"dropper_set_date": true,
"dropper_target_path_linux": "/tmp/monkey",
"dropper_target_path_win_32": "C:\\Windows\\temp\\monkey32.exe",
"dropper_target_path_win_64": "C:\\Windows\\temp\\monkey64.exe",
"dropper_try_move_first": true
},
"exploits": {
"exploit_lm_hash_list": [],
"exploit_ntlm_hash_list": [],
"exploit_ssh_keys": []
},
"general": {
"keep_tunnel_open_time": 1,
"monkey_dir_name": "monkey_dir",
"singleton_mutex_name": "{2384ec59-0df8-4ab9-918c-843740924a28}"
},
"kill_file": {
"kill_file_path_linux": "/var/run/monkey.not",
"kill_file_path_windows": "%windir%\\monkey.not"
},
"logging": {
"dropper_log_path_linux": "/tmp/user-1562",
"dropper_log_path_windows": "%temp%\\~df1562.tmp",
"monkey_log_path_linux": "/tmp/user-1563",
"monkey_log_path_windows": "%temp%\\~df1563.tmp",
"send_log_to_server": true
}
},
"monkey": {
"behaviour": {
"PBA_linux_filename": "",
"PBA_windows_filename": "",
"custom_PBA_linux_cmd": "",
"custom_PBA_windows_cmd": "",
"self_delete_in_cleanup": true,
"serialize_config": false,
"use_file_logging": true
},
"general": {
"alive": true,
"post_breach_actions": []
},
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": false,
"should_use_mimikatz": true
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 500
},
"tcp_scanner": {
"HTTP_PORTS": [
80,
8080,
443,
8008,
7001
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 1000,
"tcp_target_ports": [
22,
2222,
445,
135,
3389,
80,
8080,
443,
8008,
3306,
9200,
7001
]
}
}
}

View File

@ -1,17 +1,8 @@
{
"basic": {
"credentials": {
"exploit_password_list": [
"Password1!",
"1234",
"password",
"12345678"
],
"exploit_user_list": [
"Administrator",
"root",
"user"
]
"exploit_password_list": [],
"exploit_user_list": []
},
"general": {
"should_exploit": true
@ -46,18 +37,9 @@
"exploits": {
"general": {
"exploiter_classes": [
"SmbExploiter",
"WmiExploiter",
"SSHExploiter",
"ShellShockExploiter",
"SambaCryExploiter",
"ElasticGroovyExploiter",
"Struts2Exploiter",
"WebLogicExploiter",
"HadoopExploiter",
"VSFTPDExploiter"
],
"skip_exploit_if_file_exist": false
"Struts2Exploiter"
],
"skip_exploit_if_file_exist": true
},
"ms08_067": {
"ms08_067_exploit_attempts": 5,
@ -149,19 +131,19 @@
"life_cycle": {
"max_iterations": 1,
"retry_failed_explotation": true,
"timeout_between_iterations": 100,
"timeout_between_iterations": 30,
"victims_max_exploit": 7,
"victims_max_find": 30
},
"system_info": {
"collect_system_info": true,
"extract_azure_creds": true,
"should_use_mimikatz": true
"collect_system_info": false,
"extract_azure_creds": false,
"should_use_mimikatz": false
}
},
"network": {
"ping_scanner": {
"ping_scan_timeout": 1000
"ping_scan_timeout": 100
},
"tcp_scanner": {
"HTTP_PORTS": [
@ -173,7 +155,7 @@
],
"tcp_scan_get_banner": true,
"tcp_scan_interval": 0,
"tcp_scan_timeout": 3000,
"tcp_scan_timeout": 300,
"tcp_target_ports": [
22,
2222,

View File

@ -4,6 +4,7 @@ import logging
import pytest
from time import sleep
from envs.monkey_zoo.blackbox.analyzers.performance_analyzer import PerformanceAnalyzer
from envs.monkey_zoo.blackbox.island_client.monkey_island_client import MonkeyIslandClient
from envs.monkey_zoo.blackbox.analyzers.communication_analyzer import CommunicationAnalyzer
from envs.monkey_zoo.blackbox.island_client.island_config_parser import IslandConfigParser
@ -58,12 +59,30 @@ class TestMonkeyBlackbox(object):
config_parser = IslandConfigParser(conf_filename)
analyzer = CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())
log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
BasicTest(test_name,
island_client,
config_parser,
[analyzer],
timeout_in_seconds,
log_handler).run()
BasicTest(
name=test_name,
island_client=island_client,
config_parser=config_parser,
analyzers=[analyzer],
timeout=timeout_in_seconds,
post_exec_analyzers=[],
log_handler=log_handler).run()
@staticmethod
def run_performance_test(island_client, conf_filename, test_name, timeout_in_seconds):
    """
    Run a blackbox test that also checks report-page performance.

    Uses a CommunicationAnalyzer while the test runs, plus a PerformanceAnalyzer
    that executes only after all monkeys finished (post_exec_analyzers).
    """
    config_parser = IslandConfigParser(conf_filename)
    log_handler = TestLogsHandler(test_name, island_client, TestMonkeyBlackbox.get_log_dir_path())
    BasicTest(
        name=test_name,
        island_client=island_client,
        config_parser=config_parser,
        analyzers=[CommunicationAnalyzer(island_client, config_parser.get_ips_of_targets())],
        timeout=timeout_in_seconds,
        post_exec_analyzers=[PerformanceAnalyzer(
            island_client,
            # Flip to True to drop into a debugger when report pages are too slow.
            break_if_took_too_long=False
        )],
        log_handler=log_handler).run()
@staticmethod
def get_log_dir_path():
@ -108,3 +127,17 @@ class TestMonkeyBlackbox(object):
def test_wmi_pth(self, island_client):
TestMonkeyBlackbox.run_basic_test(island_client, "WMI_PTH.conf", "WMI_PTH")
def test_performance(self, island_client):
    """
    This test includes the SSH + Elastic + Hadoop + MSSQL machines all in one test
    for a total of 8 machines including the Monkey Island.

    It has 2 analyzers - the regular one which checks all the Monkeys
    and the Timing one which checks how long the report took to execute
    """
    TestMonkeyBlackbox.run_performance_test(
        island_client,
        "PERFORMANCE.conf",
        "test_report_performance",
        timeout_in_seconds=10*60)

View File

@ -1,4 +1,3 @@
import json
from time import sleep
import logging
@ -14,11 +13,12 @@ LOGGER = logging.getLogger(__name__)
class BasicTest(object):
def __init__(self, name, island_client, config_parser, analyzers, timeout, post_exec_analyzers, log_handler):
    """
    :param name: Test name, used for logging and log directories.
    :param island_client: Client for the Monkey Island API.
    :param config_parser: Parser of the island configuration used by this test.
    :param analyzers: Analyzers polled while the test runs, until timeout.
    :param timeout: Seconds to wait for the analyzers to pass.
    :param post_exec_analyzers: Analyzers run once after all monkeys finished.
    :param log_handler: Handler that downloads and stores test logs.
    """
    # Note: the superseded pre-merge signature line (without post_exec_analyzers)
    # was removed; only the current 7-argument signature remains.
    self.name = name
    self.island_client = island_client
    self.config_parser = config_parser
    self.analyzers = analyzers
    self.post_exec_analyzers = post_exec_analyzers
    self.timeout = timeout
    self.log_handler = log_handler
@ -32,13 +32,13 @@ class BasicTest(object):
self.island_client.kill_all_monkeys()
self.wait_until_monkeys_die()
self.wait_for_monkey_process_to_finish()
self.test_post_exec_analyzers()
self.parse_logs()
self.island_client.reset_env()
def print_test_starting_info(self):
LOGGER.info("Started {} test".format(self.name))
LOGGER.info("Machines participating in test:")
LOGGER.info(" ".join(self.config_parser.get_ips_of_targets()))
LOGGER.info("Machines participating in test: " + ", ".join(self.config_parser.get_ips_of_targets()))
print("")
def test_until_timeout(self):
@ -62,10 +62,8 @@ class BasicTest(object):
timer.get_time_taken()))
def all_analyzers_pass(self):
for analyzer in self.analyzers:
if not analyzer.analyze_test_results():
return False
return True
analyzers_results = [analyzer.analyze_test_results() for analyzer in self.analyzers]
return all(analyzers_results)
def get_analyzer_logs(self):
log = ""
@ -94,4 +92,9 @@ class BasicTest(object):
If we try to launch monkey during that time window monkey will fail to start, that's
why test needs to wait a bit even after all monkeys are dead.
"""
LOGGER.debug("Waiting for Monkey process to close...")
sleep(TIME_FOR_MONKEY_PROCESS_TO_FINISH)
def test_post_exec_analyzers(self):
    # Run analyzers that only make sense once the monkeys are done executing
    # (e.g. report performance); a plain assert fails the pytest run if any
    # of them reports failure.
    post_exec_analyzers_results = [analyzer.analyze_test_results() for analyzer in self.post_exec_analyzers]
    assert all(post_exec_analyzers_results)

View File

@ -25,6 +25,7 @@ from monkey_island.cc.resources.root import Root
from monkey_island.cc.resources.telemetry import Telemetry
from monkey_island.cc.resources.telemetry_feed import TelemetryFeed
from monkey_island.cc.resources.pba_file_download import PBAFileDownload
from monkey_island.cc.resources.test.clear_caches import ClearCaches
from monkey_island.cc.resources.version_update import VersionUpdate
from monkey_island.cc.resources.pba_file_upload import FileUpload
from monkey_island.cc.resources.attack.attack_config import AttackConfiguration
@ -116,6 +117,7 @@ def init_api_resources(api):
api.add_resource(VersionUpdate, '/api/version-update', '/api/version-update/')
api.add_resource(MonkeyTest, '/api/test/monkey')
api.add_resource(ClearCaches, '/api/test/clear_caches')
api.add_resource(LogTest, '/api/test/log')

View File

@ -1,4 +1,4 @@
"""
This package contains resources used by blackbox tests
to analyze test results, download logs and so on.
"""

View File

@ -0,0 +1,35 @@
import logging
import flask_restful
from monkey_island.cc.auth import jwt_required
from monkey_island.cc.services.attack.attack_report import AttackReportService
from monkey_island.cc.services.reporting.report import ReportService
NOT_ALL_REPORTS_DELETED = "Not all reports have been cleared from the DB!"
logger = logging.getLogger(__name__)
class ClearCaches(flask_restful.Resource):
    """
    Test-only endpoint that wipes the cached reports, so BlackBox timing tests
    measure real report generation rather than cache hits.
    :note: DO NOT CALL THIS IN PRODUCTION CODE as this will slow down the user experience.
    """

    @jwt_required()
    def get(self, **kw):
        try:
            logger.warning("Trying to clear caches! Make sure this is not production")
            # Drop both the security report and the ATT&CK report caches.
            for report_service in (ReportService, AttackReportService):
                report_service.delete_saved_report_if_exists()
            # TODO: Monkey.clear_caches(), clear LRU caches of function in the Monkey object
        except RuntimeError as err:
            logger.exception(err)
            flask_restful.abort(500, error_info=str(err))

        # Double-check that nothing survived the deletion.
        reports_still_cached = ReportService.is_report_generated() or AttackReportService.is_report_generated()
        if reports_still_cached:
            logger.error(NOT_ALL_REPORTS_DELETED)
            flask_restful.abort(500, error_info=NOT_ALL_REPORTS_DELETED)

        return {"success": "true"}

View File

@ -103,3 +103,9 @@ class AttackReportService:
"""
generated_report = mongo.db.attack_report.find_one({})
return generated_report is not None
@staticmethod
def delete_saved_report_if_exists():
    """
    Clear the cached ATT&CK report documents from the DB.
    :raises RuntimeError: if documents remain after the deletion attempt
    """
    delete_result = mongo.db.attack_report.delete_many({})
    if mongo.db.attack_report.count_documents({}) != 0:
        # Bugfix: raw_result is a dict — concatenating it to a str raised
        # TypeError instead of the intended RuntimeError; wrap in str().
        raise RuntimeError("Attack Report cache not cleared. DeleteResult: " + str(delete_result.raw_result))

View File

@ -1,4 +1,5 @@
from monkey_island.cc.services.attack.technique_reports import AttackTechnique
from monkey_island.cc.services.reporting.report import ReportService
from common.utils.attack_utils import ScanStatus
from monkey_island.cc.database import mongo
@ -23,4 +24,6 @@ class T1003(AttackTechnique):
else:
status = ScanStatus.UNSCANNED.value
data.update(T1003.get_message_and_status(status))
data['stolen_creds'] = ReportService.get_stolen_creds()
data['stolen_creds'].extend(ReportService.get_ssh_keys())
return data

View File

@ -11,7 +11,7 @@ class T1016(AttackTechnique):
scanned_msg = ""
used_msg = "Monkey gathered network configurations on systems in the network."
query = [{'$match': {'telem_category': 'system_info'}},
query = [{'$match': {'telem_category': 'system_info', 'data.network_info': {'$exists': True}}},
{'$project': {'machine': {'hostname': '$data.hostname', 'ips': '$data.network_info.networks'},
'networks': '$data.network_info.networks',
'netstat': '$data.network_info.netstat'}},

View File

@ -11,7 +11,7 @@ class T1082(AttackTechnique):
scanned_msg = ""
used_msg = "Monkey gathered system info from machines in the network."
query = [{'$match': {'telem_category': 'system_info'}},
query = [{'$match': {'telem_category': 'system_info', 'data.network_info': {'$exists': True}}},
{'$project': {'machine': {'hostname': '$data.hostname', 'ips': '$data.network_info.networks'},
'aws': '$data.aws',
'netstat': '$data.network_info.netstat',

View File

@ -773,6 +773,16 @@ class ReportService:
return False
@staticmethod
def delete_saved_report_if_exists():
    """
    This function clears the saved report from the DB.
    :raises RuntimeError: if report documents remain after deletion
    """
    delete_result = mongo.db.report.delete_many({})
    if mongo.db.report.count_documents({}) != 0:
        # Bugfix: raw_result is a dict — concatenating it to a str raised
        # TypeError instead of the intended RuntimeError; wrap in str().
        raise RuntimeError("Report cache not cleared. DeleteResult: " + str(delete_result.raw_result))
@staticmethod
def decode_dot_char_before_mongo_insert(report_dict):
"""

View File

@ -0,0 +1,31 @@
import uuid
from monkey_island.cc.models import Monkey
from monkey_island.cc.services.telemetry.processing.system_info_collectors.system_info_telemetry_dispatcher import \
SystemInfoTelemetryDispatcher
from monkey_island.cc.testing.IslandTestCase import IslandTestCase
class TestEnvironmentTelemetryProcessing(IslandTestCase):
    def test_process_environment_telemetry(self):
        """Dispatching an EnvironmentCollector result should set the monkey's environment field."""
        self.fail_if_not_testing_env()
        self.clean_monkey_db()

        # Arrange: a fresh monkey document and the dispatcher under test.
        monkey_guid = str(uuid.uuid4())
        a_monkey = Monkey(guid=monkey_guid)
        a_monkey.save()
        dispatcher = SystemInfoTelemetryDispatcher()

        on_premise = "On Premise"
        # Minimal system-info telemetry carrying only the environment collector's result.
        telem_json = {
            "data": {
                "collectors": {
                    "EnvironmentCollector": {"environment": on_premise},
                }
            },
            "monkey_guid": monkey_guid
        }

        # Act
        dispatcher.dispatch_collector_results_to_relevant_processors(telem_json)

        # Assert: the processed telemetry was persisted onto the monkey document.
        self.assertEqual(Monkey.get_single_monkey_by_guid(monkey_guid).environment, on_premise)

View File

@ -9,7 +9,10 @@ export function renderMachine(val) {
/* Function takes data gathered from system info collector and creates a
string representation of machine from that data. */
export function renderMachineFromSystemData(data) {
  // Reconstructed final version: the span previously interleaved the pre-merge
  // and post-merge lines of this function (diff residue).
  // Hostname can be missing from system info; only open the "( " group if present.
  let machineStr = '';
  if (typeof data['hostname'] !== 'undefined') {
    machineStr = data['hostname'] + ' ( ';
  }
  data['ips'].forEach(function (ipInfo) {
    // IP entries are either plain strings or objects carrying an 'addr' field.
    if (typeof ipInfo === 'object') {
      machineStr += ipInfo['addr'] + ', ';
    } else {
      machineStr += ipInfo + ', ';
    }
  });
  // Trim the trailing ", " left by the loop; close the paren only if we opened one.
  if (typeof data['hostname'] !== 'undefined') {
    return machineStr.slice(0, -2) + ' )';
  } else {
    return machineStr.slice(0, -2);
  }
}
/* Formats telemetry data that contains _id.machine and _id.usage fields into columns

View File

@ -17,7 +17,7 @@ class T1003 extends React.Component {
<br/>
{this.props.data.status === ScanStatus.USED ?
<StolenPasswordsComponent
data={this.props.reportData.glance.stolen_creds.concat(this.props.reportData.glance.ssh_keys)}/>
data={this.props.data.stolen_creds}/>
: ''}
</div>
);

View File

@ -12,13 +12,14 @@ class T1082 extends React.Component {
static getSystemInfoColumns() {
return ([{
columns: [
{
Header: 'Machine',
{ Header: 'Machine',
id: 'machine',
accessor: x => renderMachineFromSystemData(x.machine),
style: {'whiteSpace': 'unset'}
},
{Header: 'Gathered info', id: 'info', accessor: x => renderUsageFields(x.collections), style: {'whiteSpace': 'unset'}}
style: {'whiteSpace': 'unset'}},
{ Header: 'Gathered info',
id: 'info',
accessor: x => renderUsageFields(x.collections),
style: {'whiteSpace': 'unset'}}
]
}])
}

View File

@ -18,8 +18,14 @@ class MapPageComponent extends AuthComponent {
killPressed: false,
showKillDialog: false,
telemetry: [],
telemetryLastTimestamp: null
telemetryLastTimestamp: null,
isScrolledUp: false,
telemetryLines: 0,
telemetryCurrentLine: 0
};
this.telemConsole = React.createRef();
this.handleScroll = this.handleScroll.bind(this);
this.scrollTop = 0;
}
events = {
@ -67,6 +73,12 @@ class MapPageComponent extends AuthComponent {
telemetryLastTimestamp: res['timestamp']
});
this.props.onStatusChange();
let telemConsoleRef = this.telemConsole.current;
if (!this.state.isScrolledUp) {
telemConsoleRef.scrollTop = telemConsoleRef.scrollHeight - telemConsoleRef.clientHeight;
this.scrollTop = telemConsoleRef.scrollTop;
}
}
});
};
@ -138,9 +150,22 @@ class MapPageComponent extends AuthComponent {
);
}
handleScroll(e) {
  let element = e.target;
  // Derive the console's text line height from its computed style ("NNpx").
  let telemetryStyle = window.getComputedStyle(element);
  let telemetryLineHeight = parseInt((telemetryStyle.lineHeight).replace('px', ''));
  this.setState({
    // User scrolled up if the new offset is above the last recorded one
    // (this.scrollTop is updated when telemetry auto-scrolls to the bottom).
    isScrolledUp: (element.scrollTop < this.scrollTop),
    // Convert pixel offsets into 1-based line numbers for the [current/total] counter.
    telemetryCurrentLine: Math.trunc(element.scrollTop/telemetryLineHeight)+1,
    telemetryLines: Math.trunc(element.scrollHeight/telemetryLineHeight)
  });
}
renderTelemetryConsole() {
return (
<div className="telemetry-console">
<div className="telemetry-console" onScroll={this.handleScroll} ref={this.telemConsole}>
{
this.state.telemetry.map(this.renderTelemetryEntry)
}
@ -148,6 +173,14 @@ class MapPageComponent extends AuthComponent {
);
}
renderTelemetryLineCount() {
  // Overlay showing "[current line / total lines]" for the telemetry console;
  // values are maintained by handleScroll.
  return (
    <div className="telemetry-lines">
      <b>[{this.state.telemetryCurrentLine}/{this.state.telemetryLines}]</b>
    </div>
  );
}
render() {
return (
<div>
@ -170,6 +203,7 @@ class MapPageComponent extends AuthComponent {
<div style={{height: '80vh'}}>
<ReactiveGraph graph={this.state.graph} options={options} events={this.events}/>
</div>
{this.renderTelemetryLineCount()}
</Col>
<Col xs={4}>
<input className="form-control input-block"

View File

@ -324,6 +324,17 @@ body {
font-weight: bold;
}
.telemetry-lines {
z-index: 3;
position: absolute;
bottom: 103px;
right: 20px;
background: #000000cc;
border-radius: 5px;
padding: 1px;
color: white;
}
.map-legend {
font-size: 18px;
}
@ -582,4 +593,3 @@ body {
margin-left: auto;
margin-right: auto;
}