@@ -27,26 +27,38 @@


from execdb import app, db
- from execdb.models.job import Job, BuildStep
+ from execdb.models.job import Job, BuildStep, EXEC_OUTCOME
+ from execdb.serializers.api_v1 import Serializer
from sqlalchemy import desc

import json
import re
+ import datetime

from pprint import pformat

main = Blueprint('main', __name__)
- BB_URL = app.config['BUILDBOT_FRONTPAGE_URL']
RESULTSDB_URL = app.config['RESULTSDB_FRONTPAGE_URL']

+ QUERY_LIMIT = 20
+
RE_PAGE = re.compile(r"([?&])page=([0-9]+)")
+ RE_CALLBACK = re.compile(r"([?&])callback=[^&]*&?")
+ RE_CLEAN_AMPERSANDS = re.compile(r'&+')
+
RP = {}
- RP['get_jobs'] = reqparse.RequestParser()
- RP['get_jobs'].add_argument('page', default=0, type=int, location='args')
- RP['get_jobs'].add_argument('limit', default=30, type=int, location='args')
+ SERIALIZE = Serializer().serialize

+ # =============================================================================
+ # GLOBAL METHODS
+ # =============================================================================

def pagination(q, page, limit):
+     """
+     Sets the offset/limit for the DB query.
+     limit+1 is purposely set as 'limit' so we can later on decide whether 'next'
+     page link should be provided or set to None.
+     """
    # pagination offset
    try:
        page = int(page)
@@ -56,45 +68,66 @@
    except (TypeError, ValueError):
        pass

-     # apply the query limit
-     try:
-         limit = int(limit)
-     except (ValueError, TypeError):
-         limit = QUERY_LIMIT
-
-     q = q.limit(limit)
+     q = q.limit(limit + 1)
    return q

- # TODO: find a better way to do this

-
- def prev_next_urls():
+ def prev_next_urls(data, limit=QUERY_LIMIT):
    global RE_PAGE
+
    try:
        match = RE_PAGE.findall(request.url)
        flag, page = match[0][0], int(match[0][1])
    except IndexError: # page not found
-         if '?' in request.url:
-             return None, "%s&page=1" % request.url
-         else:
-             return None, "%s?page=1" % request.url
+         page = None

    prev = None
    next = None
-     prevpage = page - 1
-     nextpage = page + 1
+     placeholder = "[!@#$%^&*PLACEHOLDER*&^%$#@!]"
+
+     if page is None:
+         if "?" in request.url:
+             baseurl = "%s&page=%s" % (request.url, placeholder)
+         else:
+             baseurl = "%s?page=%s" % (request.url, placeholder)
+         page = 0
+     else:
+         baseurl = RE_PAGE.sub("%spage=%s" % (flag, placeholder), request.url)
+
+     baseurl = RE_CALLBACK.sub(r"\1", baseurl)
+     baseurl = RE_CLEAN_AMPERSANDS.sub('&', baseurl)

    if page > 0:
-         prev = RE_PAGE.sub("%spage=%s" % (flag, prevpage), request.url)
-         next = RE_PAGE.sub("%spage=%s" % (flag, nextpage), request.url)
+         prev = baseurl.replace(placeholder, str(page - 1))
+     if len(data) > limit:
+         next = baseurl.replace(placeholder, str(page + 1))
+         data = data[:limit]

-     return prev, next
+     return data, prev, next


- @main.route('/')
- @main.route('/index')
- @main.route('/jobs', methods=['GET'])
- def index():
+ # =============================================================================
+ # API
+ # =============================================================================
+
+ # =============================================================================
+ # JOBS
+ # =============================================================================
+
+ @main.route('/api/v1/jobs/<uuid>', methods=['GET'])
+ def get_job(uuid):
+     job = Job.query.filter_by(uuid=uuid).first()
+     if not job:
+         return jsonify({'message': 'Job not found'}), 404
+     return jsonify(SERIALIZE(job))
+
+
+ RP['get_jobs'] = reqparse.RequestParser()
+ RP['get_jobs'].add_argument('page', default=0, type=int, location='args')
+ RP['get_jobs'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args')
+
+ @main.route('/api/v1/jobs', methods=['GET'])
+ def get_jobs():
    try:
        args = RP['get_jobs'].parse_args()
    except JSONBadRequest as error:
@@ -103,237 +136,311 @@
        return jsonify(error.data), error.code

    query = db.session.query(Job).order_by(desc(Job.t_triggered))
+
    query = pagination(query, args['page'], args['limit'])
+     data, prev, next = prev_next_urls(query.all(), args['limit'])

-     prev, next = prev_next_urls()
-     jobs = query.all()
-
-     return render_template('index.html',
-                            jobs=jobs,
-                            buildbot_url=BB_URL,
-                            prev=prev,
-                            next=next)
+     return jsonify(dict(
+         prev=prev,
+         next=next,
+         data=[SERIALIZE(o) for o in data],
+     ))

+ RP['create_job'] = reqparse.RequestParser()
+ RP['create_job'].add_argument('taskname', required=True, location='json')
+ RP['create_job'].add_argument('item', required=True, location='json')
+ RP['create_job'].add_argument('item_type', location='json')
+ RP['create_job'].add_argument('ref_url', location='json')

- @main.route('/jobs/<uuid>', methods=['GET'])
- def show_job(uuid):
+ @main.route('/api/v1/jobs', methods=['POST'])
+ def create_job():
    try:
-         job = db.session.query(Job).filter(Job.uuid == uuid).one()
-     except orm_exc.NoResultFound:
-         return 'UUID not found', 404
-     job.t_triggered = str(job.t_triggered).split('.')[0]
-     return render_template('show_job.html',
-                            job=job,
-                            buildbot_url=BB_URL,
-                            resultsdb_url=RESULTSDB_URL,
-                            artifacts_base_url=app.config['ARTIFACTS_BASE_URL'])
+         args = RP['create_job'].parse_args()
+     except JSONBadRequest as error:
+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code
+     except HTTPException as error:
+         return jsonify(error.data), error.code

+     job = Job(**args)
+     db.session.add(job)
+     db.session.commit()
+     # FIXME - add resultsdb-like serializer
+     retval = {
+         'uuid': job.uuid,
+         'taskname': job.taskname,
+         'item': job.item,
+         'item_type': job.item_type,
+         'ref_url': job.ref_url,
+         'outcome': job.outcome,
+         'status': job.status
+     }

- @main.route('/jobs/<uuid>/steps', methods=['GET'])
- def show_steps(uuid):
-     try:
-         job = db.session.query(Job).filter(Job.uuid == uuid).one()
-     except orm_exc.NoResultFound:
-         return 'UUID not found', 404
+     return jsonify(SERIALIZE(job)), 201

-     steps = dict(
-         buildbot_url=BB_URL,
-         steps=[],
-         job_status=job.current_state,
-         job_duration=str(job.build_took),
-     )
-     name_map = {
-         "rm_tmp": "Cleaning-up temporary files",
-         "rm_log": "Cleaning-up log files",
-         "git": "Cloning the task repository",
-         "runtask": "Running the testsuite",
-         "MasterShellCommand": "Creating artifacts directory on master",
-         "upload": "Uploading the artifacts to master",
-         "MasterShellCommand_1": "Compressing artifacts",
-         "MasterShellCommand_2": "Publishing the artifacts",
-     }
-     for step in job.build_steps:
-         if step.status not in ['OK', 'NOT OK', 'INPROGRESS']:
-             continue
-         s = {}
-         s['name'] = step.name
-         s['description'] = name_map.get(step.name, step.name)
-         s['status'] = step.status
-         s['log_url'] = step.log_url
-         duration = step.step_took
-         minutes = duration.seconds / 60
-         seconds = duration.seconds - (minutes*60)
-         duration = []
-         if minutes:
-             duration.append('%s minutes' % minutes)
-         duration.append('%s seconds' % seconds)
-         s['duration'] = ' '.join(duration)
-         steps['steps'].append(s)
-
-     return jsonify(steps)
-
-
-
- @main.route('/jobs', methods=['POST'])
- def create_job():
-     job = Job()

-     data = request.json
-     job.fedmsg_data = json.dumps(data)
+ def _update_job(uuid, start=False, ref_url=None, outcome=None, duration=None):
+     if duration:
+         try:
+             duration = int(duration)
+         except ValueError:
+             return jsonify({'message': 'duration must be int or None'}), 400
+
+     job = Job.query.filter_by(uuid=uuid).first()
+     if not job:
+         return jsonify({'message': 'Job not found'}), 404

-     # FIXME - add validation
-     job.taskname = data['taskname']
-     job.item = data['item']
-     job.item_type = data['item_type']
-     job.arch = data['arch']
+     if outcome and outcome not in EXEC_OUTCOME+('_COMPUTED_', ):
+         return jsonify({'message': "outcome %r not one of %r" % (outcome, EXEC_OUTCOME+('_COMPUTED_', ))}), 400
+
+     if start:
+         job.start()
+
+     if outcome:
+         try:
+             # mark last step as finished, if not done already
+             last_step = job.steps[-1]
+             if last_step.status != 'FINISHED':
+                 last_step.finish()
+                 db.session.add(last_step)
+         except IndexError:
+             pass
+         job.finish(outcome)
+
+     if duration is not None:
+         if job.status != 'FINISHED':
+             job.finish()
+         job.t_finished = datetime.datetime.utcnow()
+         job.t_started = job.t_finished - datetime.timedelta(seconds=duration)
+
+
+     if ref_url:
+         job.ref_url = ref_url

    db.session.add(job)
    db.session.commit()
-     # FIXME - add resultsdb-like serializer
-     retval = {
-         'id': job.id,
-         'uuid': job.uuid,
-         't_triggered': job.t_triggered.isoformat()
-     }

-     return jsonify(retval), 201
+     return jsonify(SERIALIZE(job))


- def process_event(data):
+ RP['update_job'] = reqparse.RequestParser()
+ RP['update_job'].add_argument('start', type=bool, default=False, location='json')
+ RP['update_job'].add_argument('ref_url', location='json')
+ RP['update_job'].add_argument('outcome', location='json')
+ RP['update_job'].add_argument('duration', location='json')

-     def bb_convert_properties(prop):
-         """Converts list of lists to dict"""
-         return dict([(key, value) for key, value, _ in prop])
+ @main.route('/api/v1/jobs/<uuid>', methods=['POST'])
+ def update_job(uuid):
+     try:
+         args = RP['update_job'].parse_args()
+     except JSONBadRequest as error:
+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code
+     except HTTPException as error:
+         return jsonify(error.data), error.code

-     # at the moment, we act just on these events
-     event = data['event']
-     known_events = ['changeAdded', 'buildStarted', 'stepStarted',
-                     'stepFinished', 'buildFinished']
+     return _update_job(uuid, **args)

-     if event not in known_events:
-         # FIXME remove
-         if 'uuid' in json.dumps(data):
-             app.logger.debug("UUID found in %s", event)

-         return 'Skipping event', 204
+ # =============================================================================
+ # JENKINS WORKAROUND
+ # =============================================================================

-     # grab the 'properties' field
-     if event == 'changeAdded':
-         properties = bb_convert_properties(data['payload']['change']['properties'])
-     elif event in ['buildStarted', 'buildFinished']:
-         properties = bb_convert_properties(data['payload']['build']['properties'])
-     elif event in ['stepStarted', 'stepFinished']:
-         properties = bb_convert_properties(data['payload']['properties'])
+ RP['jenkins_notification'] = reqparse.RequestParser()
+ RP['jenkins_notification'].add_argument('build', type=dict, required=True, location='json')
+ @main.route('/api/v1/jobs/jenkins_notification', methods=['POST'])
+ def jenkins_notification():
+     try:
+         args = RP['jenkins_notification'].parse_args()
+     except JSONBadRequest as error:
+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code
+     except HTTPException as error:
+         return jsonify(error.data), error.code

-     # abort if uuid is not provided
    try:
-         uuid = properties['uuid']
+         data = args['build']
+         phase = data['phase']
+         ref_url = data['full_url']
+         uuid = data['parameters']['uuid']
    except KeyError:
-         return 'Missing `uuid` field in properties', 400
+         return jsonify({"message": "Malformed Request, missing one of build, phase, full_url, parameters->uuid"}), 400
+
+     if phase == 'SCHEDULED':
+         return _update_job(uuid, start=False, ref_url=ref_url)
+
+     if phase == 'STARTED':
+         return _update_job(uuid, start=True, ref_url=ref_url)
+
+     if phase == 'FINALIZED':
+         try:
+             status = data['status']
+         except KeyError:
+             return jsonify({"message": "Malformed Request, missing status"}), 400
+         outcome = {'SUCCESS': 'COMPLETED', 'FAILURE': 'FAILED', 'ABORTED': 'ABORTED'}.get(status, 'FAILED')
+         return _update_job(uuid, ref_url=ref_url, outcome=outcome)
+
+     return jsonify({"message": "Nothing happened"}), 200
+

-     if uuid is None:
-         return 'UUID set to None', 400
+ # =============================================================================
+ # STEPS
+ # =============================================================================

+ RP['create_step'] = reqparse.RequestParser()
+ RP['create_step'].add_argument('name', required=True, location='json')
+ RP['create_step'].add_argument('description', location='json')
+ RP['create_step'].add_argument('duration', location='json')
+ RP['create_step'].add_argument('outcome', location='json')
+
+ @main.route('/api/v1/jobs/<uuid>/steps', methods=['POST'])
+ def create_step(uuid):
    try:
-         job = db.session.query(Job).filter(Job.uuid == uuid).one()
-     except orm_exc.NoResultFound:
-         return 'UUID not found', 400
+         args = RP['create_step'].parse_args()
+     except JSONBadRequest as error:
+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code
+     except HTTPException as error:
+         return jsonify(error.data), error.code

-     if event == 'changeAdded':
-         # FIXME ?
+     if args['duration']:
+         try:
+             args['duration'] = int(args['duration'])
+         except ValueError:
+             return jsonify({'message': 'duration must be int or None'}), 400
+
+     job = Job.query.filter_by(uuid=uuid).first()
+     if not job:
+         return jsonify({'message': 'Job not found'}), 404
+
+     try:
+         # mark last step as finished, if not done already
+         last_step = job.steps[-1]
+         if last_step.status != 'FINISHED':
+             last_step.finish()
+             db.session.add(last_step)
+     except IndexError:
        pass

-     elif event == 'buildStarted' and job.current_state == 'Triggered':
-         job.start()
+     step = BuildStep(args['description'], args['name'], args['outcome'])
+     step.start()
+     if args['outcome']:
+         if step.status != 'FINISHED':
+             step.finish(args['outcome'])
+         step.outcome = args['outcome']
+     if args['duration'] is not None:
+         if step.status != 'FINISHED':
+             step.finish()
+         step.t_finished = datetime.datetime.utcnow()
+         step.t_started = step.t_finished - datetime.timedelta(seconds=args['duration'])
+
+     job.steps.append(step)
+
+     db.session.add(step)
+     db.session.add(job)
+     db.session.commit()
+
+     return jsonify(SERIALIZE(step)), 201
+
+
+ RP['update_step'] = reqparse.RequestParser()
+ RP['update_step'].add_argument('duration', default=None, location='json')
+ RP['update_step'].add_argument('outcome', default='COMPLETED', location='json')
+
+ @main.route('/api/v1/jobs/<uuid>/steps/<id>', methods=['POST'])
+ def update_step(uuid, id):
+     try:
+         args = RP['update_step'].parse_args()
+     except JSONBadRequest as error:
+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code
+     except HTTPException as error:
+         return jsonify(error.data), error.code

-         job.taskname = properties['taskname']
-         job.item = properties['item']
-         job.item_type = properties['item_type']
-         job.arch = properties['arch']
-         job.slavename = properties['slavename']
-         job.link_build_log = '/builders/%s/builds/%s' % (
-             data['payload']['build']['builderName'],
-             properties['buildnumber'])
-
-         db.session.add(job)
-
-         # add 'empty' steps for the build (since we know them already)
-         # app.logger.debug("%s: %s" % (uuid, data['payload']['build']['steps']))
-         # app.logger.debug("%s - Build Started" % uuid)
-         for step_info in data['payload']['build']['steps']:
-             # app.logger.debug("%s -- adding step %s"% (uuid, step_info['name']))
-             step = BuildStep(name=step_info['name'])
-             step.job = job
-             db.session.add(step)
-
-         db.session.commit()
-
-     elif event == 'stepStarted' and job.current_state == 'Running':
-         step_info = data['payload']['step']
-         # app.logger.debug("%s - Step Started - %s"% (uuid, step_info['name']))
+     if args['duration']:
        try:
-             step = job.get_build_step(step_info['name'])
-         except KeyError:
-             app.logger.debug("Job %s had missing step %s", job.uuid, step_info)
-             step = BuildStep(name=step_info['name'])
-             step.job = job
-
-         step.start()
-         step.status = 'INPROGRESS'
-         step.data = json.dumps(data['payload'])  # FIXME - store sensible subset of data
-         db.session.add(step)
-         db.session.commit()
-         # app.logger.debug("%s - Step Started - %s - written to db"% (uuid, step_info['name']))
-
-     elif event == 'stepFinished' and job.current_state == 'Running':
-         step_info = data['payload']['step']
-         # app.logger.debug("%s - Step Finished - %s"% (uuid, step_info['name']))
+             args['duration'] = int(args['duration'])
+         except ValueError:
+             return jsonify({'message': 'duration must be int or None'}), 400
+
+     job = Job.query.filter_by(uuid=uuid).first()
+     if not job:
+         return jsonify({'message': 'Job not found'}), 404
+
+     if id == 'last':
        try:
-             step = job.get_build_step(step_info['name'])
-         except KeyError:
-             return 'StepFinished received for non-existing step: %r' % step_info['name'], 400
+             step = job.steps[-1]
+         except IndexError:
+             return jsonify({'message': "Step not found"}), 404
+     else:
+         try:
+             id = int(id)
+         except ValueError:
+             return jsonify({'message': "id %r could not be converted to int" % id}), 400
+         try:
+             step = job.steps[id]
+         except IndexError:
+             return jsonify({'message': "Step not found"}), 404

-         step.finish()
+     if args['outcome'] and args['outcome'] not in EXEC_OUTCOME:
+         return jsonify({'message': "outcome %r not one of %r" % (args['outcome'], EXEC_OUTCOME,)}), 400

-         step.status = 'OK'
-         # results key is only present for non-ok results
-         if 'results' in step_info.keys():
-             step.status = 'NOT OK'
-         step.data = json.dumps(data['payload'])  # FIXME - store sensible subset of data
+     if args['outcome']:
+         if step.status != 'FINISHED':
+             step.finish(args['outcome'])
+         step.outcome = args['outcome']

-         db.session.add(step)
-         db.session.commit()
-         # app.logger.debug("%s - Step Finished - %s - written to db" % (uuid, step_info['name']))
+     if args['duration'] is not None:
+         if step.status != 'FINISHED':
+             step.finish()
+         step.t_finished = datetime.datetime.utcnow()
+         step.t_started = step.t_finished - datetime.timedelta(seconds=args['duration'])

-     elif event == 'buildFinished' and job.current_state == 'Running':
-         job.finish()
-         db.session.add(job)
-         db.session.commit()
-         # app.logger.debug("%s - Build Finished " % uuid)
+     db.session.add(step)
+     db.session.add(job)
+     db.session.commit()

+     return jsonify(SERIALIZE(step))

- @main.route('/buildbottest', methods=['POST'])
- def bb_push():
-     """
-     Receives the post-push notifications from buildbot and fills in
-     the steps for the job.
-     """
-     # data are embedded in form field 'packets'
-     data = request.form
+
+ # =============================================================================
+ # FRONTEND
+ # =============================================================================
+
+ RP['index'] = reqparse.RequestParser()
+ RP['index'].add_argument('page', default=0, type=int, location='args')
+ RP['index'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args')
+
+ @main.route('/')
+ @main.route('/index')
+ @main.route('/jobs', methods=['GET'])
+ def index():
    try:
-         data = request.form['packets']
-     except werkzeug.exceptions.BadRequestKeyError:
-         return 'Field `packets` missing in request form.', 400
-     data = json.loads(data)
-
-     # app.logger.debug(pformat(data))
-
-     # multiple messages may be present in one 'packet'
-     for entry in data:
-         process_event(entry)
-         # app.logger.debug("%s %s, %s", entry['id'], entry['event'], process_event(entry))
-
-     # plain 200 code needs to be returned - otherwise buildbot is
-     # endlessly trying to re-send the message.
-     # FIXME - add logging for non-200 responses
-     return '', 200
+         args = RP['index'].parse_args()
+     except JSONBadRequest as error:
+         return jsonify({"message": "Bad Request"}), error.code
+     except HTTPException as error:
+         return jsonify(error.data), error.code
+
+     query = db.session.query(Job).order_by(desc(Job.t_triggered))
+
+     query = pagination(query, args['page'], args['limit'])
+     data, prev, next = prev_next_urls(query.all(), args['limit'])
+
+     jobs = [SERIALIZE(d) for d in data]
+
+     return render_template('index.html',
+                            jobs=jobs,
+                            prev=prev,
+                            next=next)
+
+
+ @main.route('/jobs/<uuid>', methods=['GET'])
+ def show_job(uuid):
+     try:
+         job = db.session.query(Job).filter(Job.uuid == uuid).one()
+     except orm_exc.NoResultFound:
+         return 'UUID not found', 404
+
+     job = SERIALIZE(job)
+
+     return render_template('show_job.html',
+                            job=job,
+                            resultsdb_url=RESULTSDB_URL,
+                            artifacts_base_url=app.config['ARTIFACTS_BASE_URL'])
+
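For reference, the /api/v1/jobs/jenkins_notification endpoint in the patch only reads build -> phase, full_url, parameters->uuid and, for the FINALIZED phase, status. A minimal sketch of a client simulating such a notification (the base URL and all field values are illustrative; only the field names come from the handler above):

    import requests

    # only the fields the handler actually reads are shown here
    notification = {
        "build": {
            "phase": "FINALIZED",
            "status": "SUCCESS",   # mapped to the 'COMPLETED' outcome by the handler
            "full_url": "http://jenkins.example.org/job/execdb-task/42/",
            "parameters": {"uuid": "<execdb-job-uuid>"},
        }
    }
    requests.post("http://localhost:5000/api/v1/jobs/jenkins_notification",
                  json=notification)
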
Instead of consuming the Buildbot push notification stream, ExecDB provides a comprehensive API to create jobs, update their state and outcome, and record individual build steps.
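To illustrate the intended flow, here is a minimal sketch of an executor driving those endpoints with the requests library. The base URL and the example values are assumptions for illustration; the routes, parameter names and the 'COMPLETED' outcome come from the patch, and the serialized job is assumed to expose its uuid (as the patch's own retval dict suggests):

    import requests

    API = "http://localhost:5000/api/v1"   # illustrative base URL

    # create a new job; 'taskname' and 'item' are required by the parser
    job = requests.post(API + "/jobs", json={
        "taskname": "rpmlint",
        "item": "foo-1.0-1.fc99",
        "item_type": "koji_build",
    }).json()
    uuid = job["uuid"]

    # mark the job as started and record where it runs
    requests.post(API + "/jobs/%s" % uuid,
                  json={"start": True, "ref_url": "http://executor.example.org/run/1"})

    # open a step; the endpoint closes any previously open step automatically
    requests.post(API + "/jobs/%s/steps" % uuid, json={"name": "runtask"})

    # close the last step, then finish the whole job
    requests.post(API + "/jobs/%s/steps/last" % uuid, json={"outcome": "COMPLETED"})
    requests.post(API + "/jobs/%s" % uuid, json={"outcome": "COMPLETED"})

    print(requests.get(API + "/jobs/%s" % uuid).json())
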
The default behavior tries to make things as reasonable as possible:
The UI still updates dynamically in the job detail view.
I'd like to discuss some of the implementation details
UNFINISHED:
Read the whole conversation at: https://fedorapeople.org/groups/qa/phabarchive/differentials/phab.qa.fedoraproject.org/D1150.html