import time
from datetime import datetime

from flask import Flask
from flask_pymongo import PyMongo  # the flask.ext.* import path is deprecated
from celery import Celery

from common import *

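# NOTE: `common` (star-imported above) is assumed to provide the helpers used
# below -- get_jobclass_by_name() and refresh_connector_config() -- along with
# the job classes they return; common.py itself is not part of this file.

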
def make_celery(app):
    """Create a Celery app whose tasks execute inside the Flask app context."""
    celery = Celery(main='MONKEY_TASKS',
                    backend=app.config['CELERY_RESULT_BACKEND'],
                    broker=app.config['BROKER_URL'])
    celery.conf.update(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            # Run every task invocation inside the Flask application context.
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery


fapp = Flask(__name__)
fapp.config.from_object('dbconfig')
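# 'dbconfig' is expected to define at least the broker/result-backend settings
# read in make_celery() and the Flask-PyMongo connection settings. Illustrative
# values only -- the real dbconfig.py is not shown here:
#
#     BROKER_URL = 'amqp://guest@localhost//'
#     CELERY_RESULT_BACKEND = 'mongodb://localhost:27017/monkey'
#     MONGO_URI = 'mongodb://localhost:27017/monkey'
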
celery = make_celery(fapp)
mongo = PyMongo(fapp)

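# Tasks defined below run inside the Flask application context (see ContextTask
# in make_celery), so they can use `mongo` directly.
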

class JobExecution(object):
    """Wraps a job document from MongoDB and drives its execution and logging."""

    _jobinfo = None
    _job = None
    _mongo = None
    _log = []

    def __init__(self, mongo, jobinfo):
        self._mongo = mongo
        self._jobinfo = jobinfo
        self._log = []  # per-instance log; a class-level list would be shared between jobs
        self.update_job_state("processing")

        # Instantiate the job class named in the job document, wire up its
        # connector, and load the job's properties.
        job_class = get_jobclass_by_name(self._jobinfo["type"])
        con = job_class.connector_type()
        refresh_connector_config(self._mongo, con)
        self._job = job_class(con, self)
        self._job.load_job_properties(self._jobinfo["properties"])

    def get_job(self):
        return self._job

    def refresh_job_info(self):
        self._jobinfo = self._mongo.db.job.find_one({"_id": self._jobinfo["_id"]})

    def update_job_state(self, state):
        """Persist the job's execution state ("processing", "running", "error")."""
        self._jobinfo["execution"]["state"] = state
        self._mongo.db.job.update({"_id": self._jobinfo["_id"]},
                                  {"$set": {"execution": self._jobinfo["execution"]}})

    def _log_results(self, res):
        """Upsert the job's results document, stamped with the current time."""
        self._mongo.db.results.update({"jobid": self._jobinfo["_id"]},
                                      {"$set": {"results": {"time": datetime.now(), "res": res}}},
                                      upsert=True)

    def log(self, text):
        """Append a timestamped entry to the job log and persist the full log."""
        self._log.append([datetime.now().isoformat(), text])
        self._mongo.db.results.update({"jobid": self._jobinfo["_id"]},
                                      {"$set": {"log": self._log}},
                                      upsert=True)

    def run(self):
        """Start the underlying job and record the resulting state."""
        self.log("Starting job")
        res = False
        try:
            res = self._job.run()
        except Exception as e:
            self.log("Exception raised while running: %s" % e)
            self.update_job_state("error")
            return False

        if res:
            self.log("Done job startup")
            self.update_job_state("running")
        else:
            self.log("Job startup error")
            self.update_job_state("error")
        return res

    def get_results(self):
        self.log("Trying to get results")
        res = []
        try:
            res = self._job.get_results()
        except Exception as e:
            self.log("Exception raised while getting results: %s" % e)
            return False
        self._log_results(res)
        return True

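# MongoDB documents used by JobExecution (shapes as read/written above):
#   job collection:     {"_id", "type", "properties",
#                        "execution": {"state": "processing" | "running" | "error"}}
#   results collection: {"jobid": <job _id>,
#                        "results": {"time": <datetime>, "res": <job results>},
#                        "log": [[<iso timestamp>, <text>], ...]}
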
@celery.task
def run_task(jobid):
    """Look up the job document by id, build a JobExecution and drive it."""
    print "searching for ", jobid
    job_info = mongo.db.job.find_one({"_id": jobid})
    if not job_info:
        return False

    job_exec = None
    try:
        job_exec = JobExecution(mongo, job_info)
    except Exception as e:
        print "init JobExecution exception - ", e
        return False

    if not job_exec.get_job():
        job_exec.update_job_state("error")
        return False

    if not job_exec.run():
        return False

    if not job_exec.get_results():
        return False

    return "done task: " + run_task.request.id


@celery.task
def update_cache(connector):
    time.sleep(30)
    return "connector: " + repr(connector)
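
# Illustrative dispatch from the web application (not part of this module); the
# tasks above would typically be enqueued through Celery's standard calling API:
#
#     run_task.delay(job_id)                       # job_id: _id of a mongo.db.job document
#     update_cache.apply_async(args=[connector])   # equivalently: update_cache.delay(connector)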