Commit cef2adc211: Merge remote-tracking branch 'origin/master'

@@ -0,0 +1,91 @@
How to create a monkey build environment:

Windows:
1. Install Python 2.7
   https://www.python.org/download/releases/2.7
2. Install pywin32-219.win32-py2.7.exe
   http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/
3. Install VCForPython27.msi
   http://www.microsoft.com/en-us/download/details.aspx?id=44266
4. Download & run get-pip.py
   https://bootstrap.pypa.io/get-pip.py
5. Run:
   setx path "%path%;C:\Python27\;C:\Python27\Scripts"
   python -m pip install enum34
   python -m pip install impacket
   python -m pip install PyCrypto
   python -m pip install pyasn1
   python -m pip install cffi
   python -m pip install twisted
   python -m pip install rdpy
   python -m pip install requests
   python -m pip install odict
   python -m pip install paramiko
   python -m pip install psutil
   python -m pip install PyInstaller
   type C:\Python27\Lib\site-packages\zope\__init__.py
6. Put the source code in C:\Code\monkey\chaos_monkey
7. Download and extract the UPX binary to C:\Code\monkey\chaos_monkey\bin\upx.exe:
   http://upx.sourceforge.net/download/upx391w.zip
8. Run C:\Code\monkey\chaos_monkey\build_windows.bat to build; the output is in dist\monkey.exe
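
Before running build_windows.bat (or build_linux.sh in the Linux steps below), it can help to confirm that the interpreter actually sees every package from step 5. The snippet below is an illustrative sketch rather than a file from this repository; the import names noted in the comments are assumptions about what each pip package installs. Run it with the same Python 2.7 interpreter that will run PyInstaller.

# check_build_env.py - illustrative pre-build sanity check (not part of the repository)
import importlib
import os

REQUIRED_MODULES = [
    "enum",        # provided by enum34
    "impacket",
    "Crypto",      # provided by PyCrypto
    "pyasn1",
    "cffi",
    "twisted",
    "rdpy",
    "requests",
    "odict",
    "paramiko",
    "psutil",
    "PyInstaller",
]

missing = []
for name in REQUIRED_MODULES:
    try:
        importlib.import_module(name)
    except ImportError:
        missing.append(name)

if missing:
    print "Missing packages: %s" % ", ".join(missing)
else:
    print "All build dependencies import correctly."

# Step 7 also expects UPX next to the sources (Windows path shown; adjust on Linux).
upx_path = r"C:\Code\monkey\chaos_monkey\bin\upx.exe"
if not os.path.exists(upx_path):
    print "UPX not found at %s" % upx_path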

Linux (Tested on Ubuntu 12.04):
1. Run:
   sudo apt-get update
   sudo apt-get install python-pip python-dev libffi-dev upx
   sudo pip install enum34
   sudo pip install impacket
   sudo pip install PyCrypto --upgrade
   sudo pip install pyasn1
   sudo pip install cffi
   sudo pip install zope.interface --upgrade
   sudo pip install twisted
   sudo pip install rdpy
   sudo pip install requests --upgrade
   sudo pip install odict
   sudo pip install paramiko
   sudo pip install psutil
   sudo pip install https://github.com/pyinstaller/pyinstaller/releases/download/3.0.dev2/PyInstaller-3.0.dev2.tar.gz
   sudo apt-get install winbind
2. Put the source code in /home/user/Code/monkey/chaos_monkey
3. To build, run in a terminal:
   cd /home/user/Code/monkey/chaos_monkey
   chmod +x build_linux.sh
   ./build_linux.sh
   The output is in dist/monkey

How to connect the build environment to the C&C:
- The build environment will automatically compile the source code stored on the C&C and update the C&C binaries accordingly.

Linux (Tested on Ubuntu 12.04):
1. Set up the C&C according to the readme in the monkey_island folder
2. Install CIFS utilities:
   sudo apt-get install cifs-utils
3. Run:
   mkdir /home/user/Code
   sudo mkdir /mnt/sources
   sudo mkdir /mnt/binaries
4. Save the username and password for the C&C SMB share:
   echo username=<username> > /home/user/.smbcreds
   echo password=<password> >> /home/user/.smbcreds
   (Change <username> and <password> according to the C&C)
5. Edit fstab:
   Run: sudo nano /etc/fstab
   Add the rows:
   //monkeycc/sources /mnt/sources cifs iocharset=utf-8,credentials=/home/user/.smbcreds,uid=1000 0 0
   //monkeycc/binaries /mnt/binaries cifs iocharset=utf-8,credentials=/home/user/.smbcreds,uid=1000 0 0
6. Remount:
   sudo mount -a
7. Check that the sources exist in /mnt/sources.
   If not, add a line to /etc/hosts with the C&C IP and hostname, then remount.
8. Put build_from_cc.sh in /home/user and run it with the name of the output binary (as it appears on the C&C) as a parameter,
   for example: build_from_cc.sh monkey-linux-32
   Use Ctrl+C to manually check compilation and Ctrl+\ to exit the script.
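
As described above, build_from_cc.sh takes the name of an output binary, compiles the matching sources from the mounted share, and copies the result back to the binaries share in a loop. The sketch below only illustrates that flow in Python; the real logic lives in build_from_cc.sh, and both the per-binary directory layout under /mnt/sources and the 60-second rebuild interval are assumptions made for the example.

# build_from_cc_sketch.py - illustrative only; not the actual build_from_cc.sh
# Assumes /mnt/sources/<binary_name>/ holds that binary's chaos_monkey sources and
# that the finished binary should land in /mnt/binaries/<binary_name>.
import shutil
import subprocess
import sys
import time

WORKDIR = "/home/user/Code/monkey/chaos_monkey"

def build_once(binary_name):
    src = "/mnt/sources/%s" % binary_name           # mounted from //monkeycc/sources
    shutil.rmtree(WORKDIR, ignore_errors=True)
    shutil.copytree(src, WORKDIR)                   # refresh the working copy
    subprocess.check_call(["./build_linux.sh"], cwd=WORKDIR)
    shutil.copy("%s/dist/monkey" % WORKDIR,
                "/mnt/binaries/%s" % binary_name)   # mounted from //monkeycc/binaries

if __name__ == "__main__":
    name = sys.argv[1]                              # e.g. monkey-linux-32
    while True:                                     # rebuild periodically; Ctrl+\ to quit
        build_once(name)
        time.sleep(60)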

Windows:
1. Set up the C&C according to the readme in the monkey_island folder
2. Set up net use mappings to the C&C server:
   net use Z:\ \\monkeycc\sources /persistent:yes
   net use X:\ \\monkeycc\binaries /persistent:yes
3. mkdir C:\Code
4. Extract build_from_cc.bat to C:\Code and run it with the name of the output binary (as it appears on the C&C) as a parameter,
   for example: build_from_cc.bat monkey-windows-64.exe

@@ -0,0 +1,13 @@
enum34
impacket
PyCrypto
pyasn1
cffi
twisted
rdpy
requests
odict
paramiko
psutil
PyInstaller
ecdsa

Five binary image files added (not shown): 160 B, 148 B, 201 B, 158 B, 146 B.

@@ -48,7 +48,7 @@
<div id="jobs" class="panel-body panel-collapse collapse in">
    <table class="table table-bordered table-hover" id="jobs-table">
        <thead>
            <tr><th>Time</th><th>Status</th><th>Data</th></tr>
            <tr><th>Id</th><th>Time</th><th>Type</th><th>Status</th><th>Properties</th></tr>
        </thead>
        <tbody>
        </tbody>

@@ -56,6 +56,22 @@
        </div>
    </div>
    <!-- /.Jobs section -->
    <!-- Log section -->
    <div class="panel panel-default">
        <div class="panel-heading">
            <a href="#logs" data-toggle="collapse">Log</a>
        </div>
        <div id="logs" class="panel-body panel-collapse collapse in">
            <table class="table table-bordered table-hover" id="logs-table">
                <thead>
                    <tr><th>Time</th><th>Data</th></tr>
                </thead>
                <tbody>
                </tbody>
            </table>
        </div>
    </div>
    <!-- /.Log section -->
</div>
<!-- /.Main section -->

@@ -83,7 +99,17 @@
<a href="#newJob" data-toggle="collapse">New Job</a>
</div>
<div id="newJob" style="overflow: visible" class="panel-body panel-collapse collapse in" aria-expanded="true">
    <div id="job-config">
    <div id="job-config-section">
        <div id="job-config">
        </div>
        <button id="btnSendJob" class="btn btn-default" type="button"
                onclick="sendJob()" style="margin-top:-4px; visibility: hidden">
            Update
        </button>
        <button id="btnDeleteJob" class="btn btn-default" type="button"
                onclick="deleteJob()" style="margin-top:-4px; visibility: hidden">
            Delete
        </button>
    </div>
</div>
</div>

@@ -1,18 +1,8 @@
/*const jsonFile = "/api/jbos";
var monkeys = null;
var generationDate = null;*/

// The JSON must be fully loaded before onload() happens for calling draw() on 'monkeys'
$.ajaxSetup({
    async: false
});

// Reading the JSON file containing the monkeys' informations
/*$.getJSON(jsonFile, function(json) {
    jobs = json.objects;
    generationDate = json.timestamp;
});*/

// Images/icons constants
const ICONS_DIR = "./css/img/objects/";
const ICONS_EXT = ".png";

@@ -21,16 +11,35 @@ const ICONS_EXT = ".png";
// If variable from local storage != null, assign it, otherwise set it's default value.

var jobsTable = undefined;
var logsTable = undefined;
var vcenterCfg = undefined;
var jobCfg = undefined;
var selectedJob = undefined;

JSONEditor.defaults.theme = 'bootstrap3';


function initAdmin() {

    jobsTable = $("#jobs-table").DataTable({
        "ordering": true,
        "order": [[1, "desc"]],
    });
    logsTable = $("#logs-table").DataTable({
        "ordering": false,
    });
    jobsTable.on( 'click', 'tr', function () {
        if ( $(this).hasClass('selected') ) {
            $(this).removeClass('selected');
        }
        else {
            jobsTable.$('tr.selected').removeClass('selected');
            $(this).addClass('selected');
        }
        jobdata = jobsTable.row(this).data();
        selectedJob = jobdata[0];
        createNewJob(selectedJob, jobdata[3]);
        showLog(selectedJob);
    } );

    vcenterCfg = new JSONEditor(document.getElementById('vcenter-config'),{
        schema: {

@@ -75,7 +84,8 @@ function initAdmin() {
            },
            datacenter_name: {
                title: "Datacenter (opt.)",
                type: "string", },
                type: "string",
            },
            cluster_name: {
                title: "Cluster (opt.)",
                type: "string",

@@ -93,31 +103,45 @@ function initAdmin() {
        },
        disable_edit_json: false,
        disable_properties: true,
        startval: $,
    });

    window.setTimeout(updateJobs, 10000);
    setInterval(updateJobs, 5000);
    setInterval(showLog, 5000);
    loadVcenterConfig();
    updateJobs();

}

function updateVCenterConf() {
function showLog() {
    logsTable.clear();

    if (!selectedJob) {
        return;
    }

    $.getJSON('/job?action=log&id=' + selectedJob, function(json) {
        var logsList = json.log;
        for (var i = 0; i < logsList.length; i++) {
            logsTable.row.add([logsList[i][0], logsList[i][1]]);
        }

        logsTable.draw();

    });
}

function updateJobs() {
    $.getJSON('/job', function(json) {
        jobsTable.clear();
        var jobs = json.objects;
        var jobsList = json.objects;

        for (var i = 0; i < jobs.length; i++) {
            jobsTable.row.add([jobs[i].timestamp, jobs[i].status, JSON.stringify(jobs[i].data)]);
        for (var i = 0; i < jobsList.length; i++) {
            jobsTable.row.add([jobsList[i].id, jobsList[i].creation_time, jobsList[i].type, jobsList[i].execution.state, JSON.stringify(jobsList[i].properties)]);
        }

        jobsTable.draw();
        //enableJobsSelect();
    });

}

function loadVcenterConfig() {

@@ -152,60 +176,114 @@ function updateVcenterConfig() {

}

function createNewJob() {
function emptySelection() {
    showLog();
    selectedJob = undefined;
    jobsTable.$('tr.selected').removeClass('selected');
}

function createNewJob(id, state) {
    if (!id) {
        emptySelection();
    }

    elem = document.getElementById('job-config');
    elem.innerHTML = ""
    jobCfg = new JSONEditor(elem,{
        schema: {
            type: "object",
            title: "Job",
            properties: {
                job: {
                    title: "Type",
                    $ref: "/jobcreate",
                }
            },
            options: {
                "collapsed": false
            },
        },
        ajax: true,
        disable_edit_json: false,
        disable_collapse: true,
        disable_properties: true,
    });
        schema: {
            type: "object",
            title: "Job",
            properties: {
                job: {
                    title: "Type",
                    $ref: "/jobcreate" + ((id)?"?id="+id:""),
                }
            },
            options: {
                "collapsed": false
            },
        },
        ajax: true,
        disable_edit_json: false,
        disable_collapse: true,
        disable_properties: true,
        no_additional_properties: true
    });

    jobCfg.on('ready',function() {
        if (id && state != "pending") {
            jobCfg.disable();
            document.getElementById("btnSendJob").style.visibility = "hidden";
            document.getElementById("btnDeleteJob").style.visibility = "hidden";
        }
        else {
            jobCfg.enable();
            document.getElementById("btnSendJob").style.visibility = "visible";
            if (id) {
                document.getElementById("btnDeleteJob").style.visibility = "visible";
            }
            else {
                document.getElementById("btnDeleteJob").style.visibility = "hidden";
            }
        }
    });
}

function sendJob() {
    var job_config = jobCfg.getValue()

    $.ajax({
        headers : {
            'Accept' : 'application/json',
            'Content-Type' : 'application/json'
        },
        url : '/jobcreate',
        type : 'POST',
        data : JSON.stringify(job_config.job),
        success : function(response, textStatus, jqXhr) {
            console.log("Job successfully updated!");
            updateJobs();
        },
        error : function(jqXHR, textStatus, errorThrown) {
            // log the error to the console
            console.log("The following error occured: " + textStatus, errorThrown);
        },
        complete : function() {
            console.log("Sending job config...");
        }
    });
}

function deleteJob() {
    var job_config = jobCfg.getValue();
    if (job_config.job.id) {
        $.ajax({
            headers : {
                'Accept' : 'application/json',
                'Content-Type' : 'application/json'
            },
            url : '/jobcreate',
            type : 'GET',
            data : "action=delete&id=" + job_config.job.id,
            success : function(response, textStatus, jqXhr) {
                console.log("Job successfully updated!");
                updateJobs();
            },
            error : function(jqXHR, textStatus, errorThrown) {
                // log the error to the console
                console.log("The following error occured: " + textStatus, errorThrown);
            },
            complete : function() {
                console.log("Sending job config...");
            }
        });
    }
}

function configSched() {

}

/**
 * Manage the event when an object is selected
 */
function onSelect(properties) {

    /*if (properties.nodes.length > 0) {
        onNodeSelect(properties.nodes);
    }
    else
    {
        var content = "<b>No selection</b>"
        $("#selectionInfo").html(content);
        $('#monkey-config').hide()
        $('#btnConfigLoad, #btnConfigUpdate').hide();
        telemTable.clear();
        telemTable.draw();
    }*/

    /*if (properties.edges.length > 0) {
        onEdgeSelect(properties.edges);
    }*/

}


/**
 * Clears the value in the local storage
 */

@@ -0,0 +1,31 @@
from connectors.vcenter import VCenterJob, VCenterConnector
from connectors.demo import DemoJob, DemoConnector

available_jobs = [VCenterJob, DemoJob]


def get_connector_by_name(name):
    for jobclass in available_jobs:
        if name == jobclass.connector_type.__name__:
            return jobclass.connector_type()
    return None


def get_jobclass_by_name(name):
    for jobclass in available_jobs:
        if jobclass.__name__ == name:
            return jobclass


def refresh_connector_config(mongo, connector):
    properties = mongo.db.connector.find_one({"type": connector.__class__.__name__})
    if properties:
        connector.load_properties(properties)


def load_connector(mongo, name):
    con = get_connector_by_name(name)
    if not con:
        return None
    refresh_connector_config(mongo, con)
    return con

@@ -1,17 +1,17 @@
def _load_prop_dict(self, target, prop):
    for property in prop:
        if not target.has_key(property):
            continue
        if type(prop[property]) is dict:
            _load_prop_dict(self, target[property], prop[property])
        else:
            target[property] = prop[property]


class NetControllerConnector(object):
    def __init__(self):
        self._properties = {}

    def _load_prop_dict(self, target, prop):
        for property in prop:
            if not target.has_key(property):
                continue
            if type(prop[property]) is dict:
                self._load_prop_dict(target[property], prop[property])
            else:
                target[property] = prop[property]

    def is_connected(self):
        return False

@@ -22,7 +22,7 @@ class NetControllerConnector(object):
        return self._properties

    def load_properties(self, properties):
        self._load_prop_dict(self._properties, properties)
        _load_prop_dict(self, self._properties, properties)

    def get_vlans_list(self):
        raise NotImplementedError()

@@ -36,19 +36,48 @@ class NetControllerConnector(object):
    def disconnect(self):
        return

class NetControllerJob(object):
    connector = NetControllerConnector
    def log(self, text):
        pass

    def __init__(self):
        self._properties = {
            # property: [value, enumerating_function]
        }
    def set_logger(self, logger):
        self.log = logger

class NetControllerJob(object):
    connector_type = NetControllerConnector
    _connector = None
    _logger = None

    _properties = {
        # property: value
    }

    _enumerations = {

    }

    def __init__(self, existing_connector=None, logger=None):
        self._connector = existing_connector
        self._logger = logger
        if logger:
            self._connector.set_logger(self.log)

    def log(self, text):
        if self._logger:
            self._logger.log(text)

    def get_job_properties(self):
        return self._properties

    def set_job_properties(self, properties):
        return {}
    def load_job_properties(self, properties):
        _load_prop_dict(self, self._properties, properties)

    def get_property_function(self, property):
        if property in self._enumerations.keys():
            return self._enumerations[property]
        return None

    def run(self):
        raise NotImplementedError()

    def get_results(self):
        return []

@@ -35,9 +35,15 @@ class DemoConnector(NetControllerConnector):
        return []

class DemoJob(NetControllerJob):
    connector = DemoConnector
    connector_type = DemoConnector
    _properties = {
        "vlan": 0,
    }
    _enumerations = {
        "vlan": "get_vlans_list",
    }

    def __init__(self):
        self._properties = {
            "vlan": [0, "get_vlans_list"],
        }
    def run(self):
        import time
        self.log("Running demo job...")
        time.sleep(30)

@@ -63,20 +63,42 @@ class VCenterConnector(NetControllerConnector):
    def get_entities_on_vlan(self, vlanid):
        return []

    def deploy_monkey(self, vlanid):
    def deploy_monkey(self, vm_name):
        if not self._properties["monkey_template_name"]:
            raise Exception("Monkey template not configured")

        if not self.is_connected():
            self.connect()

        vcontent = self._service_instance.RetrieveContent()  # get updated vsphere state
        monkey_template = self._get_obj(vcontent, [vim.VirtualMachine], self._properties["monkey_template_name"])
        if not monkey_template:
            raise Exception("Monkey template not found")

        task = self._clone_vm(vcontent, monkey_template)
        if not task:
        self.log("Cloning vm: (%s -> %s)" % (monkey_template, vm_name))
        monkey_vm = self._clone_vm(vcontent, monkey_template, vm_name)
        if not monkey_vm:
            raise Exception("Error deploying monkey VM")
        self.log("Finished cloning")

        monkey_vm = task.entity
        return monkey_vm

    def set_network(self, vm_obj, vlan_name):
        if not self.is_connected():
            self.connect()
        vcontent = self._service_instance.RetrieveContent()  # get updated vsphere state
        dvs_pg = self._get_obj(vcontent, [vim.dvs.DistributedVirtualPortgroup], vlan_name)
        nic = self._get_vm_nic(vm_obj)
        virtual_nic_spec = self._create_nic_spec(nic, dvs_pg)
        dev_changes = [virtual_nic_spec]
        spec = vim.vm.ConfigSpec()
        spec.deviceChange = dev_changes
        task = vm_obj.ReconfigVM_Task(spec=spec)
        return self._wait_for_task(task)

    def power_on(self, vm_obj):
        task = vm_obj.PowerOnVM_Task()
        return self._wait_for_task(task)

    def disconnect(self):
        Disconnect(self._service_instance)

@@ -86,7 +108,37 @@ class VCenterConnector(NetControllerConnector):
        if self._service_instance:
            self.disconnect()

    def _clone_vm(self, vcontent, vm):
    def _get_vm_nic(self, vm_obj):
        for dev in vm_obj.config.hardware.device:
            if isinstance(dev, vim.vm.device.VirtualEthernetCard):
                return dev
        return None

    def _create_nic_spec(self, virtual_nic_device, dvs_pg):
        virtual_nic_spec = vim.vm.device.VirtualDeviceSpec()
        virtual_nic_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
        virtual_nic_spec.device = virtual_nic_device
        virtual_nic_spec.device.key = virtual_nic_device.key
        virtual_nic_spec.device.macAddress = virtual_nic_device.macAddress
        virtual_nic_spec.device.wakeOnLanEnabled = virtual_nic_device.wakeOnLanEnabled

        virtual_nic_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
        virtual_nic_spec.device.connectable.startConnected = True
        virtual_nic_spec.device.connectable.connected = True
        virtual_nic_spec.device.connectable.allowGuestControl = True

        # configure port connection object on the requested dvs port group
        dvs_port_connection = vim.dvs.PortConnection()
        dvs_port_connection.portgroupKey = dvs_pg.key
        dvs_port_connection.switchUuid = dvs_pg.config.distributedVirtualSwitch.uuid

        # assign port to device
        virtual_nic_spec.device.backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo()
        virtual_nic_spec.device.backing.port = dvs_port_connection

        return virtual_nic_spec

    def _clone_vm(self, vcontent, vm, name):

        # get vm target folder
        if self._properties["monkey_vm_info"]["vm_folder"]:

@@ -101,7 +153,7 @@ class VCenterConnector(NetControllerConnector):
        else:
            datastore = self._get_obj(vcontent, [vim.Datastore], vm.datastore[0].info.name)

        # get vm target resoucepool
        # get vm target resource pool
        if self._properties["monkey_vm_info"]["resource_pool"]:
            resource_pool = self._get_obj(vcontent, [vim.ResourcePool], self._properties["monkey_vm_info"]["resource_pool"])
        else:

@@ -116,20 +168,28 @@ class VCenterConnector(NetControllerConnector):
        clonespec = vim.vm.CloneSpec()
        clonespec.location = relospec

        task = vm.Clone(folder=destfolder, name=self._properties["monkey_vm_info"]["name"], spec=clonespec)
        self.log("Starting clone task with the following info: %s" % repr({"folder": destfolder, "name": name, "clonespec": clonespec}))

        task = vm.Clone(folder=destfolder, name=name, spec=clonespec)
        return self._wait_for_task(task)


    @staticmethod
    def _wait_for_task(task):
    def _wait_for_task(self, task):
        """ wait for a vCenter task to finish """
        task_done = False
        while not task_done:
            if task.info.state == 'success':
                return task.info.result
                if task.info.result:
                    return task.info.result
                else:
                    return True

            if task.info.state == 'error':
                self.log("Error waiting for task: %s" % repr(task.info))
                return None
            if task.info.state == 'success':
                return task.info.result
                return None


    @staticmethod
    def _get_obj(content, vimtype, name):

@@ -153,10 +213,33 @@ class VCenterConnector(NetControllerConnector):


class VCenterJob(NetControllerJob):
    connector = VCenterConnector
    connector_type = VCenterConnector
    _vm_obj = None
    _properties = {
        "vlan": "",
        "vm_name": "",
    }
    _enumerations = {
        "vlan": "get_vlans_list",
    }

    def __init__(self):
        self._properties = {
            "vlan": [0, "get_vlans_list"],
        }
    def run(self):
        if not self._connector:
            return False

        monkey_vm = self._connector.deploy_monkey(self._properties["vm_name"])
        if not monkey_vm:
            return False

        self._vm_obj = monkey_vm

        self.log("Setting vm network")
        if not self._connector.set_network(monkey_vm, self._properties["vlan"]):
            return False

        self.log("Powering on vm")
        if not self._connector.power_on(monkey_vm):
            return False

        return True

@@ -0,0 +1,9 @@
SRV_ADDRESS = 'localhost:27017'
BROKER_URL = 'mongodb://%(srv)s/monkeybusiness' % {'srv': SRV_ADDRESS}
MONGO_URI = BROKER_URL
CELERY_RESULT_BACKEND = 'mongodb://%(srv)s/' % {'srv': SRV_ADDRESS}
CELERY_MONGODB_BACKEND_SETTINGS = {
    'database': 'monkeybusiness',
    'taskmeta_collection': 'celery_taskmeta',
}
#CELERYD_LOG_FILE="../celery.log"

@@ -1,5 +1,3 @@
import os
import sys
from flask import Flask, request, abort, send_from_directory
from flask.ext import restful
from flask.ext.pymongo import PyMongo

@@ -7,20 +5,13 @@ from flask import make_response
import bson.json_util
import json
from datetime import datetime
import dateutil.parser
from connectors.vcenter import VCenterJob, VCenterConnector
from connectors.demo import DemoJob, DemoConnector

MONGO_URL = os.environ.get('MONGO_URL')
if not MONGO_URL:
    MONGO_URL = "mongodb://localhost:27017/monkeybusiness"
from common import *
import tasks_manager

app = Flask(__name__)
app.config['MONGO_URI'] = MONGO_URL
app.config.from_object('dbconfig')
mongo = PyMongo(app)

available_jobs = [VCenterJob, DemoJob]

active_connectors = {}

class Root(restful.Resource):

@@ -33,18 +24,21 @@ class Root(restful.Resource):

class Job(restful.Resource):
    def get(self, **kw):
        id = kw.get('id')
        id = request.args.get('id')
        timestamp = request.args.get('timestamp')
        action = request.args.get('action')

        if action == "log":
            return {"log": get_job_log(id)}

        result = {}

        if (id):
            return mongo.db.job.find_one_or_404({"id": id})
            return mongo.db.job.find_one_or_404({"_id": id})
        else:
            result = {'timestamp': datetime.now().isoformat()}
            result['timestamp'] = datetime.now().isoformat()

            find_filter = {}
            if None != timestamp:
                find_filter['modifytime'] = {'$gt': dateutil.parser.parse(timestamp)}
            result['objects'] = [x for x in mongo.db.job.find(find_filter)]
            result['objects'] = [x for x in mongo.db.job.find().sort("creation_time", -1)]
        return result

    def post(self, **kw):

@@ -67,62 +61,99 @@ class Job(restful.Resource):
                                 {"$set": job_json},
                                 upsert=True)


class Connector(restful.Resource):
    def get(self, **kw):
        type = request.args.get('type')
        if (type == 'VCenterConnector'):
            vcenter = VCenterConnector()
            properties = mongo.db.connector.find_one({"type": 'VCenterConnector'})
            if properties:
                vcenter.load_properties(properties)
            ret = vcenter.get_properties()
            ret["password"] = ""  # for better security, don't expose password
            return ret
        return {}
        contype = request.args.get('type')

        # if no type given - return list of types
        if not contype:
            conlist = []
            for jobclass in available_jobs:
                if jobclass.connector_type.__name__ not in conlist:
                    conlist.append(jobclass.connector_type.__name__)
            return {"oneOf": conlist}

        con = get_connector_by_name(contype)
        if not con:
            return {}
        properties = mongo.db.connector.find_one({"type": con.__class__.__name__})
        if properties:
            con.load_properties(properties)
        ret = con.get_properties()
        ret["password"] = ""  # for better security, don't expose password
        return ret

    def post(self, **kw):
        settings_json = json.loads(request.data)
        if (settings_json.get("type") == 'VCenterConnector'):
        contype = settings_json.get("type")

            # preserve password
            properties = mongo.db.connector.find_one({"type": 'VCenterConnector'})
            if properties and (not settings_json.has_key("password") or not settings_json["password"]):
                settings_json["password"] = properties.get("password")
        # preserve password if empty given
        properties = mongo.db.connector.find_one({"type": contype})
        if properties and (not settings_json.has_key("password") or not settings_json["password"]):
            settings_json["password"] = properties.get("password")

            return mongo.db.connector.update({"type": 'VCenterConnector'},
                                             {"$set": settings_json},
                                             upsert=True)
        return mongo.db.connector.update({"type": contype},
                                         {"$set": settings_json},
                                         upsert=True)

class JobCreation(restful.Resource):
    def get(self, **kw):
        jobtype = request.args.get('type')
        if not jobtype:
        action = request.args.get('action')
        jobid = request.args.get('id')
        if not (jobtype or jobid):
            res = []
            update_connectors()
            for con in available_jobs:
                if con.connector.__name__ in active_connectors:
                if con.connector_type.__name__ in active_connectors:
                    res.append({"title": con.__name__, "$ref": "/jobcreate?type=" + con.__name__})
            return {"oneOf": res}

        job = None
        for jobclass in available_jobs:
            if jobclass.__name__ == jobtype:
                job = jobclass()
        if not jobid:
            job = get_jobclass_by_name(jobtype)()
        else:
            loaded_job = mongo.db.job.find_one({"_id": bson.ObjectId(jobid)})
            if loaded_job:
                job = get_jobclass_by_name(loaded_job.get("type"))()
                job.load_job_properties(loaded_job.get("properties"))

                if action == "delete":
                    if loaded_job.get("execution")["state"] == "pending":
                        mongo.db.job.remove({"_id": bson.ObjectId(jobid)})
                        return {'status': 'ok'}
                    else:
                        return {'status': 'bad state'}

        if job and job.connector_type.__name__ in active_connectors.keys():
            properties = {
                "type": {
                    "type": "enum",
                    "enum": [job.__class__.__name__],
                    "options": {"hidden": True}
                }
            }
            if (jobid):
                properties["_id"] = {
                    "type": "enum",
                    "enum": [jobid],
                    "name": "ID",
                }

        if job and job.connector.__name__ in active_connectors.keys():
            properties = dict()
            job_prop = job.get_job_properties()

            for prop in job_prop:
                properties[prop] = dict({})
                if type(job_prop[prop][0]) is int:
                    properties[prop]["default"] = job_prop[prop]
                if type(job_prop[prop]) is int:
                    properties[prop]["type"] = "number"
                elif type(job_prop[prop][0]) is bool:
                elif type(job_prop[prop]) is bool:
                    properties[prop]["type"] = "boolean"
                else:
                    properties[prop]["type"] = "string"
                if job_prop[prop][1]:
                    properties[prop]["enum"] = list(active_connectors[job.connector.__name__].__getattribute__(job_prop[prop][1])())
                enum = job.get_property_function(prop)
                if enum:
                    properties[prop]["enum"] = list(active_connectors[job.connector_type.__name__].__getattribute__(enum)())

        res = dict({
            "title": "%s Job" % jobtype,

@@ -137,6 +168,45 @@ class JobCreation(restful.Resource):

        return {}

    def post(self, **kw):
        settings_json = json.loads(request.data)
        jobtype = settings_json.get("type")
        jobid = settings_json.get("id")
        job = None
        for jobclass in available_jobs:
            if jobclass.__name__ == jobtype:
                job = jobclass()
        if not job:
            return {'status': 'bad type'}

        # params validation
        job.load_job_properties(settings_json)
        parsed_prop = job.get_job_properties()
        if jobid:
            res = mongo.db.job.update({"_id": bson.ObjectId(jobid)},
                                      {"$set": {"properties": parsed_prop}})
            if res and (res["ok"] == 1):
                return {'status': 'ok', 'updated': res["nModified"]}
            else:
                return {'status': 'failed'}

        else:
            execution_state = {"taskid": "",
                               "state" : "pending"}
            new_job = {
                "creation_time": datetime.now(),
                "type": jobtype,
                "properties": parsed_prop,
                "execution": execution_state,
            }
            jobid = mongo.db.job.insert(new_job)
            async = tasks_manager.run_task.delay(jobid)
            execution_state["taskid"] = async.id
            mongo.db.job.update({"_id": jobid},
                                {"$set": {"execution": execution_state}})

        return {'status': 'created'}


def normalize_obj(obj):
    if obj.has_key('_id') and not obj.has_key('id'):

@@ -164,20 +234,20 @@ def output_json(obj, code, headers=None):
    return resp


def refresh_connector_config(name):
    properties = mongo.db.connector.find_one({"type": name})
    if properties:
        active_connectors[name].load_properties(properties)

def get_job_log(jobid):
    res = mongo.db.results.find_one({"jobid": bson.ObjectId(jobid)})
    if res:
        return res["log"]
    return []

def update_connectors():
    for con in available_jobs:
        connector_name = con.connector.__name__
        connector_name = con.connector_type.__name__
        if connector_name not in active_connectors:
            active_connectors[connector_name] = con.connector()
            active_connectors[connector_name] = con.connector_type()

        if not active_connectors[connector_name].is_connected():
            refresh_connector_config(connector_name)
            refresh_connector_config(mongo, active_connectors[connector_name])
            try:
                app.logger.info("Trying to activate connector: %s" % connector_name)
                active_connectors[connector_name].connect()

@@ -185,7 +255,9 @@ def update_connectors():
            active_connectors.pop(connector_name)
            app.logger.info("Error activating connector: %s, reason: %s" % (connector_name, e))


@app.before_first_request
def init():
    update_connectors()

@app.route('/admin/<path:path>')
def send_admin(path):

@@ -0,0 +1,125 @@
import time
from flask import Flask
from datetime import datetime
from flask.ext.pymongo import PyMongo
from celery import Celery
from common import *

def make_celery(app):
    celery = Celery(main='MONKEY_TASKS', backend=app.config['CELERY_RESULT_BACKEND'],
                    broker=app.config['BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery

fapp = Flask(__name__)
fapp.config.from_object('dbconfig')
celery = make_celery(fapp)
mongo = PyMongo(fapp)


class JobExecution(object):
    _jobinfo = None
    _job = None
    _mongo = None
    _log = []

    def __init__(self, mongo, jobinfo):
        self._mongo = mongo
        self._jobinfo = jobinfo
        self.update_job_state("processing")

        job_class = get_jobclass_by_name(self._jobinfo["type"])
        con = job_class.connector_type()
        refresh_connector_config(self._mongo, con)
        self._job = job_class(con, self)
        self._job.load_job_properties(self._jobinfo["properties"])

    def get_job(self):
        return self._job

    def refresh_job_info(self):
        self._jobinfo = self._mongo.db.job.find_one({"_id": self._jobinfo["_id"]})

    def update_job_state(self, state):
        self._jobinfo["execution"]["state"] = state
        self._mongo.db.job.update({"_id": self._jobinfo["_id"]},
                                  {"$set": {"execution": self._jobinfo["execution"]}})

    def _log_resutls(self, res):
        self._mongo.db.results.update({"jobid": self._jobinfo["_id"]},
                                      {"$set": {"results": {"time" : datetime.now(), "res" : res}}},
                                      upsert=True)

    def log(self, text):
        self._log.append([datetime.now().isoformat(), text])
        self._mongo.db.results.update({"jobid": self._jobinfo["_id"]},
                                      {"$set": {"log": self._log}},
                                      upsert=True)

    def run(self):
        self.log("Starting job")
        res = False
        try:
            res = self._job.run()
        except Exception, e:
            self.log("Exception raised while running: %s" % e)
            self.update_job_state("error")
            return False
        if res:
            self.log("Done job startup")
            self.update_job_state("running")
        else:
            self.log("Job startup error")
            self.update_job_state("error")
        return res

    def get_results(self):
        self.log("Trying to get results")
        res = []
        try:
            res = self._job.get_results()
        except Exception, e:
            self.log("Exception raised while getting results: %s" % e)
            return False
        self._log_resutls(res)
        return True


@celery.task
def run_task(jobid):
    print "searching for ", jobid
    job_info = mongo.db.job.find_one({"_id": jobid})
    if not job_info:
        return False

    job_exec = None
    try:
        job_exec = JobExecution(mongo, job_info)
    except Exception, e:
        print "init JobExecution exception - ", e
        return False

    if not job_exec.get_job():
        job_exec.update_job_state("error")
        return False

    if not job_exec.run():
        return False

    if not job_exec.get_results():
        return False

    return "done task: " + run_task.request.id


@celery.task
def update_cache(connector):
    time.sleep(30)
    return "connector: " + repr(connector)

@@ -0,0 +1,4 @@
dependencies:
sudo pip install pyVmomi
sudo pip install celery
sudo pip install -U celery[mongodb]

@@ -0,0 +1,4 @@
flask
Flask-Pymongo
Flask-Restful
python-dateutil