#13 Decouple ExecDB from Buildbot (originally D1150)
Opened 7 years ago by jskladan. Modified 7 years ago

@@ -0,0 +1,28 @@ 

+ """Bulidbot Decoupling - step 1

+ 

+ Revision ID: 104a0806520e

+ Revises: 3b51d15f0f87

+ Create Date: 2017-02-22 15:39:40.333356

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '104a0806520e'

+ down_revision = '6fc76719cc5f'

+ branch_labels = None

+ depends_on = None

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ 

+ 

def upgrade():
    """Step 1 of the Buildbot decoupling: add the new 'outcome' and
    'description' columns to build_step (both nullable so existing rows
    stay valid until the data migration in step 2 back-fills them)."""
    outcome_enum = sa.Enum('COMPLETED', 'FAILED', 'CRASHED', 'ABORTED',
                           name='execoutcome')
    op.add_column('build_step',
                  sa.Column('outcome', outcome_enum, nullable=True))
    op.add_column('build_step',
                  sa.Column('description', sa.Text(), nullable=True))

+ 

+ 

def downgrade():
    """Revert step 1: drop the columns added by upgrade()."""
    for column_name in ('outcome', 'description'):
        op.drop_column('build_step', column_name)

@@ -0,0 +1,90 @@ 

+ """Bulidbot Decoupling - step 2

+ 

+ Revision ID: 3b7629fd849f

+ Revises: 104a0806520e

+ Create Date: 2017-02-22 15:43:40.895604

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '3b7629fd849f'

+ down_revision = '104a0806520e'

+ branch_labels = None

+ depends_on = None

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ from sqlalchemy.ext.declarative import declarative_base

+ from sqlalchemy.orm import relation, sessionmaker

+ from sqlalchemy.sql import text

+ import logging

+ 

+ Session = sessionmaker()

+ Base = declarative_base()

+ 

+ 

+ EXEC_OUTCOME = ('COMPLETED', 'FAILED', 'CRASHED', 'ABORTED')

+ 

+ 

class BuildStep(Base):
    """Minimal mapping of build_step for the data migration in upgrade().

    The migration loop reads step.name and writes step.description, so
    both columns must be mapped here: without the mappings, reading
    .name raises AttributeError and assigning .description only sets a
    plain Python attribute that is never persisted.
    """
    __tablename__ = 'build_step'
    id = sa.Column(sa.Integer, primary_key=True)
    status = sa.Column(sa.String(10))
    outcome = sa.Column(sa.Enum(*EXEC_OUTCOME, name='execoutcome'))
    name = sa.Column(sa.String(20))   # pre-migration type; widened later
    description = sa.Column(sa.Text)  # added by revision 104a0806520e

+ 

+ 

# Legacy Buildbot 'status' strings -> new outcome enum values.
# INPROGRESS maps to None: an unfinished step has no outcome yet.
OUTCOME_MAP = {
    'OK': 'COMPLETED',
    'NOT OK': 'FAILED',
    'INPROGRESS': None,
}

# Human-readable descriptions for the known Buildbot step names.
DESCRIPTION_MAP = {
    "rm_tmp": "Cleaning-up temporary files",
    "rm_log": "Cleaning-up log files",
    "git": "Cloning the task repository",
    "runtask": "Running the testsuite",
    "MasterShellCommand": "Creating artifacts directory on master",
    "upload": "Uploading the artifacts to master",
    "MasterShellCommand_1": "Compressing artifacts",
    "MasterShellCommand_2": "Publishing the artifacts",
}

+ 

+ 

def upgrade():
    """Step 2 of the Buildbot decoupling.

    1. Back-fill build_step.outcome/.description from the legacy
       'status'/'name' values (relies on the BuildStep mapping above
       exposing .name and .description).
    2. Rename/retype columns and drop the Buildbot-specific ones on
       build_step and job.
    """
    logger = logging.getLogger('alembic')
    connection = op.get_bind()
    session = Session(bind=connection)

    # Back-fill the new columns; commit every 1000 rows to bound memory.
    i = 0
    for step in session.query(BuildStep).yield_per(100):
        i += 1
        # .get() keeps the migration alive on unexpected legacy values:
        # unknown statuses yield no outcome, unknown step names keep the
        # raw name as description (mirrors the old frontend's
        # name_map.get(name, name) fallback).
        step.outcome = OUTCOME_MAP.get(step.status)
        step.description = DESCRIPTION_MAP.get(step.name, step.name)
        session.add(step)
        if i % 1000 == 0:
            logger.info("Traversed %s steps", i)
            session.commit()
    logger.info("Final steps commit")
    session.commit()

    # build_step: rename timestamps, widen 'name', drop Buildbot leftovers.
    op.alter_column('build_step', 'finished_at', new_column_name='t_finished')
    op.alter_column('build_step', 'started_at', new_column_name='t_started')
    op.alter_column('build_step', 'name', existing_type=sa.String(20), type_=sa.Text())
    op.drop_index('buildstep_idx_status', table_name='build_step')
    op.drop_column('build_step', 'status')
    op.drop_column('build_step', 'data')

    # job: same treatment, plus the new 'outcome' column and an index on
    # item_type for prefix queries.
    op.add_column('job', sa.Column('outcome', sa.Enum(
        'COMPLETED', 'FAILED', 'CRASHED', 'ABORTED', name='execoutcome'), nullable=True))
    op.alter_column('job', 'link_build_log', new_column_name='ref_url')
    op.alter_column('job', 't_build_ended', new_column_name='t_finished')
    op.alter_column('job', 't_build_started', new_column_name='t_started')
    op.alter_column('job', 'taskname', existing_type=sa.String(20), type_=sa.Text())
    op.alter_column('job', 'item_type', existing_type=sa.String(20), type_=sa.Text())
    op.create_index('job_idx_type', 'job', ['item_type'], unique=False,
                    postgresql_ops={'item_type': 'text_pattern_ops'})
    op.drop_column('job', 'fedmsg_data')
    op.drop_column('job', 'slavename')
    op.drop_column('job', 'arch')

+ 

+ 

def downgrade():
    # Deliberately a no-op for now: the matching upgrade() drops columns
    # whose data (status, data, fedmsg_data, slavename, arch) cannot be
    # reconstructed, so no automatic reverse migration is provided.
    pass
    # FIXME - put together later on

file modified
+15 -6
@@ -121,6 +121,19 @@ 

      import time

      import datetime

      import random

+ 

+     dsc_map = {

+         "rm_tmp": "Cleaning-up temporary files",

+         "rm_log": "Cleaning-up log files",

+         "git": "Cloning the task repository",

+         "runtask": "Running the testsuite",

+         "MasterShellCommand": "Creating artifacts directory on master",

+         "upload": "Uploading the artifacts to master",

+         "MasterShellCommand_1": "Compressing artifacts",

+         "MasterShellCommand_2": "Publishing the artifacts",

+         }

+ 

+ 

      if destructive or not db.session.query(Job).count():

          job = Job()

          job.uuid = '249cb6f4-e471-11e6-a77f-525400cb0b45'
@@ -128,24 +141,20 @@ 

          job.taskname = 'rpmgrill'

          job.item = 'libtaskotron-0.4.18-1.fc25'

          job.item_type = 'koji_build'

-         job.arch = 'x86_64'

-         job.slavename = 'qa11.qa-2'

          db.session.add(job)

          db.session.commit()

-         time.sleep(10)

+         time.sleep(5)

          job.start()

          db.session.add(job)

          db.session.commit()

          time.sleep(3)

          for step_name, sleep_time in (('rm_tmp', 2), ('git', 8), ('runtask', 17), ('MasterShellCommand', 1), ('upload', 6), ('MasterShellCommand_1', 3), ('MasterShellCommand_2', 5)):

-             step = BuildStep(step_name)

+             step = BuildStep(dsc_map[step_name], step_name)

              step.job = job

-             step.status = 'INPROGRESS'

              step.start()

              db.session.add(step)

              db.session.commit()

              time.sleep(sleep_time)

-             step.status = 'OK'

              step.finish()

              db.session.add(step)

              db.session.commit()

file modified
+329 -222
@@ -27,26 +27,38 @@ 

  

  

  from execdb import app, db

- from execdb.models.job import Job, BuildStep

+ from execdb.models.job import Job, BuildStep, EXEC_OUTCOME

+ from execdb.serializers.api_v1 import Serializer

  from sqlalchemy import desc

  

  import json

  import re

+ import datetime

  

  from pprint import pformat

  

  main = Blueprint('main', __name__)

- BB_URL = app.config['BUILDBOT_FRONTPAGE_URL']

  RESULTSDB_URL = app.config['RESULTSDB_FRONTPAGE_URL']

  

+ QUERY_LIMIT = 20

+ 

  RE_PAGE = re.compile(r"([?&])page=([0-9]+)")

+ RE_CALLBACK = re.compile(r"([?&])callback=[^&]*&?")

+ RE_CLEAN_AMPERSANDS = re.compile(r'&+')

+ 

  RP = {}

- RP['get_jobs'] = reqparse.RequestParser()

- RP['get_jobs'].add_argument('page', default=0, type=int, location='args')

- RP['get_jobs'].add_argument('limit', default=30, type=int, location='args')

+ SERIALIZE = Serializer().serialize

  

+ # =============================================================================

+ #                               GLOBAL METHODS

+ # =============================================================================

  

  def pagination(q, page, limit):

+     """

+     Sets the offset/limit for the DB query.

+     limit+1 is purposely set as 'limit' so we can later on decide whether 'next'

+     page link should be provided or set to None.

+     """

      # pagination offset

      try:

          page = int(page)
@@ -56,45 +68,66 @@ 

      except (TypeError, ValueError):

          pass

  

-     # apply the query limit

-     try:

-         limit = int(limit)

-     except (ValueError, TypeError):

-         limit = QUERY_LIMIT

- 

-     q = q.limit(limit)

+     q = q.limit(limit + 1)

      return q

  

- # TODO: find a better way to do this

  

- 

- def prev_next_urls():

def prev_next_urls(data, limit=QUERY_LIMIT):
    """Compute prev/next page URLs and trim *data* to *limit* items.

    pagination() queries limit+1 rows on purpose: a result longer than
    *limit* proves another page exists, so a 'next' link is emitted and
    the surplus row is cut off. Returns (data, prev, next); either link
    is None when there is no page in that direction.
    """
    # (no 'global' needed: RE_PAGE is only read, never rebound)
    try:
        match = RE_PAGE.findall(request.url)
        flag, page = match[0][0], int(match[0][1])
    except IndexError:  # no page= parameter in the URL
        page = None

    prev = None
    next = None
    placeholder = "[!@#$%^&*PLACEHOLDER*&^%$#@!]"

    # Build a template URL with a placeholder where the page number goes.
    if page is None:
        if "?" in request.url:
            baseurl = "%s&page=%s" % (request.url, placeholder)
        else:
            baseurl = "%s?page=%s" % (request.url, placeholder)
        page = 0
    else:
        baseurl = RE_PAGE.sub("%spage=%s" % (flag, placeholder), request.url)

    # Strip any JSONP callback= parameter and collapse duplicate ampersands.
    baseurl = RE_CALLBACK.sub(r"\1", baseurl)
    baseurl = RE_CLEAN_AMPERSANDS.sub('&', baseurl)

    if page > 0:
        prev = baseurl.replace(placeholder, str(page - 1))
    if len(data) > limit:
        next = baseurl.replace(placeholder, str(page + 1))
        data = data[:limit]

    return data, prev, next

  

  

- @main.route('/')

- @main.route('/index')

- @main.route('/jobs', methods=['GET'])

- def index():

+ # =============================================================================

+ #                                    API

+ # =============================================================================

+ 

+ # =============================================================================

+ #                                    JOBS

+ # =============================================================================

+ 

+ @main.route('/api/v1/jobs/<uuid>', methods=['GET'])

+ def get_job(uuid):

+     job = Job.query.filter_by(uuid=uuid).first()

+     if not job:

+         return jsonify({'message': 'Job not found'}), 404

+     return jsonify(SERIALIZE(job))

+ 

+ 

+ RP['get_jobs'] = reqparse.RequestParser()

+ RP['get_jobs'].add_argument('page', default=0, type=int, location='args')

+ RP['get_jobs'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args')

+ 

+ @main.route('/api/v1/jobs', methods=['GET'])

+ def get_jobs():

      try:

          args = RP['get_jobs'].parse_args()

      except JSONBadRequest as error:
@@ -103,237 +136,311 @@ 

          return jsonify(error.data), error.code

  

      query = db.session.query(Job).order_by(desc(Job.t_triggered))

+ 

      query = pagination(query, args['page'], args['limit'])

+     data, prev, next = prev_next_urls(query.all(), args['limit'])

  

-     prev, next = prev_next_urls()

-     jobs = query.all()

- 

-     return render_template('index.html',

-                            jobs=jobs,

-                            buildbot_url=BB_URL,

-                            prev=prev,

-                            next=next)

+     return jsonify(dict(

+         prev=prev,

+         next=next,

+         data=[SERIALIZE(o) for o in data],

+     ))

  

+ RP['create_job'] = reqparse.RequestParser()

+ RP['create_job'].add_argument('taskname', required=True, location='json')

+ RP['create_job'].add_argument('item', required=True, location='json')

+ RP['create_job'].add_argument('item_type', location='json')

+ RP['create_job'].add_argument('ref_url', location='json')

  

- @main.route('/jobs/<uuid>', methods=['GET'])

- def show_job(uuid):

+ @main.route('/api/v1/jobs', methods=['POST'])

+ def create_job():

      try:

-         job = db.session.query(Job).filter(Job.uuid == uuid).one()

-     except orm_exc.NoResultFound:

-         return 'UUID not found', 404

-     job.t_triggered = str(job.t_triggered).split('.')[0]

-     return render_template('show_job.html',

-                            job=job,

-                            buildbot_url=BB_URL,

-                            resultsdb_url=RESULTSDB_URL,

-                            artifacts_base_url=app.config['ARTIFACTS_BASE_URL'])

+         args = RP['create_job'].parse_args()

+     except JSONBadRequest as error:

+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code

+     except HTTPException as error:

+         return jsonify(error.data), error.code

  

+     job = Job(**args)

+     db.session.add(job)

+     db.session.commit()

+     # FIXME - add resultsdb-like serializer

+     retval = {

+         'uuid': job.uuid,

+         'taskname': job.taskname,

+         'item': job.item,

+         'item_type': job.item_type,

+         'ref_url': job.ref_url,

+         'outcome': job.outcome,

+         'status': job.status

+     }

  

- @main.route('/jobs/<uuid>/steps', methods=['GET'])

- def show_steps(uuid):

-     try:

-         job = db.session.query(Job).filter(Job.uuid == uuid).one()

-     except orm_exc.NoResultFound:

-         return 'UUID not found', 404

+     return jsonify(SERIALIZE(job)), 201

  

-     steps = dict(

-         buildbot_url=BB_URL,

-         steps=[],

-         job_status=job.current_state,

-         job_duration=str(job.build_took),

-         )

-     name_map = {

-         "rm_tmp": "Cleaning-up temporary files",

-         "rm_log": "Cleaning-up log files",

-         "git":    "Cloning the task repository",

-         "runtask": "Running the testsuite",

-         "MasterShellCommand": "Creating artifacts directory on master",

-         "upload": "Uploading the artifacts to master",

-         "MasterShellCommand_1": "Compressing artifacts",

-         "MasterShellCommand_2": "Publishing the artifacts",

-         }

-     for step in job.build_steps:

-         if step.status not in ['OK', 'NOT OK', 'INPROGRESS']:

-             continue

-         s = {}

-         s['name'] = step.name

-         s['description'] = name_map.get(step.name, step.name)

-         s['status'] = step.status

-         s['log_url'] = step.log_url

-         duration = step.step_took

-         minutes = duration.seconds / 60

-         seconds = duration.seconds - (minutes*60)

-         duration = []

-         if minutes:

-             duration.append('%s minutes' % minutes)

-         duration.append('%s seconds' % seconds)

-         s['duration'] = ' '.join(duration)

-         steps['steps'].append(s)

- 

-     return jsonify(steps)

- 

- 

- 

- @main.route('/jobs', methods=['POST'])

- def create_job():

-     job = Job()

  

-     data = request.json

-     job.fedmsg_data = json.dumps(data)

def _update_job(uuid, start=False, ref_url=None, outcome=None, duration=None):
    """Apply a state change to the Job identified by *uuid*.

    Shared backend for update_job() and jenkins_notification().
    start marks the job started; outcome finishes it (finishing the last
    open step first); duration back-dates t_started from now; ref_url is
    updated when given. Returns the serialized job, or an error response
    (404 unknown uuid, 400 bad duration/outcome).
    """
    # Normalize duration up front so both the validation and the
    # timestamp arithmetic below see an int (an unparsable value,
    # including "0", is rejected here instead of crashing timedelta).
    if duration is not None:
        try:
            duration = int(duration)
        except ValueError:
            return jsonify({'message': 'duration must be int or None'}), 400

    job = Job.query.filter_by(uuid=uuid).first()
    if not job:
        return jsonify({'message': 'Job not found'}), 404

    # '_COMPUTED_' is a pseudo-outcome on top of EXEC_OUTCOME.
    # (The rejected-value message previously concatenated a list onto the
    # EXEC_OUTCOME tuple -- a TypeError -- and misspelled '_COMPUTED_'.)
    allowed_outcomes = EXEC_OUTCOME + ('_COMPUTED_',)
    if outcome and outcome not in allowed_outcomes:
        return jsonify({'message': "outcome %r not one of %r" % (outcome, allowed_outcomes)}), 400

    if start:
        job.start()

    if outcome:
        try:
            # mark last step as finished, if not done already
            last_step = job.steps[-1]
            if last_step.status != 'FINISHED':
                last_step.finish()
            db.session.add(last_step)
        except IndexError:
            pass  # job has no steps yet
        job.finish(outcome)

    if duration is not None:
        # Back-date the start so that t_finished - t_started == duration.
        if job.status != 'FINISHED':
            job.finish()
        job.t_finished = datetime.datetime.utcnow()
        job.t_started = job.t_finished - datetime.timedelta(seconds=duration)

    if ref_url:
        job.ref_url = ref_url

    db.session.add(job)
    db.session.commit()

    return jsonify(SERIALIZE(job))

  

  

- def process_event(data):

+ RP['update_job'] = reqparse.RequestParser()

+ RP['update_job'].add_argument('start', type=bool, default=False, location='json')

+ RP['update_job'].add_argument('ref_url', location='json')

+ RP['update_job'].add_argument('outcome', location='json')

+ RP['update_job'].add_argument('duration', location='json')

  

-     def bb_convert_properties(prop):

-         """Converts list of lists to dict"""

-         return dict([(key, value) for key, value, _ in prop])

+ @main.route('/api/v1/jobs/<uuid>', methods=['POST'])

+ def update_job(uuid):

+     try:

+         args = RP['update_job'].parse_args()

+     except JSONBadRequest as error:

+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code

+     except HTTPException as error:

+         return jsonify(error.data), error.code

  

-     # at the moment, we act just on these events

-     event = data['event']

-     known_events = ['changeAdded', 'buildStarted', 'stepStarted',

-                     'stepFinished', 'buildFinished']

+     return _update_job(uuid, **args)

  

-     if event not in known_events:

-         # FIXME remove

-         if 'uuid' in json.dumps(data):

-             app.logger.debug("UUID found in %s", event)

  

-         return 'Skipping event', 204

+ # =============================================================================

+ #                               JENKINS WORKAROUND

+ # =============================================================================

  

-     # grab the 'properties' field

-     if event == 'changeAdded':

-         properties = bb_convert_properties(data['payload']['change']['properties'])

-     elif event in ['buildStarted', 'buildFinished']:

-         properties = bb_convert_properties(data['payload']['build']['properties'])

-     elif event in ['stepStarted', 'stepFinished']:

-         properties = bb_convert_properties(data['payload']['properties'])

+ RP['jenkins_notification'] = reqparse.RequestParser()

+ RP['jenkins_notification'].add_argument('build', type=dict, required=True, location='json')

+ @main.route('/api/v1/jobs/jenkins_notification', methods=['POST'])

+ def jenkins_notification():

+     try:

+         args = RP['jenkins_notification'].parse_args()

+     except JSONBadRequest as error:

+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code

+     except HTTPException as error:

+         return jsonify(error.data), error.code

  

-     # abort if uuid is not provided

      try:

-         uuid = properties['uuid']

+         data = args['build']

+         phase = data['phase']

+         ref_url = data['full_url']

+         uuid = data['parameters']['uuid']

      except KeyError:

-         return 'Missing `uuid` field in properties', 400

+         return jsonify({"message": "Malformed Request, missing one of build, phase, full_url, parameters->uuid"}), 400

+ 

+     if phase == 'SCHEDULED':

+         return _update_job(uuid, start=False, ref_url=ref_url)

+ 

+     if phase == 'STARTED':

+         return _update_job(uuid, start=True, ref_url=ref_url)

+ 

+     if phase == 'FINALIZED':

+         try:

+             status = data['status']

+         except KeyError:

+             return jsonify({"message": "Malformed Request, missing status"}), 400

+         outcome = {'SUCCESS': 'COMPLETED', 'FAILURE': 'FAILED', 'ABORTED': 'ABORTED'}.get(status, 'FAILED')

+         return _update_job(uuid, ref_url=ref_url, outcome=outcome)

+ 

+     return jsonify({"message": "Nothing happened"}), 200

+ 

  

-     if uuid is None:

-         return 'UUID set to None', 400

+ # =============================================================================

+ #                                   STEPS

+ # =============================================================================

  

+ RP['create_step'] = reqparse.RequestParser()

+ RP['create_step'].add_argument('name', required=True, location='json')

+ RP['create_step'].add_argument('description', location='json')

+ RP['create_step'].add_argument('duration', location='json')

+ RP['create_step'].add_argument('outcome', location='json')

+ 

+ @main.route('/api/v1/jobs/<uuid>/steps', methods=['POST'])

+ def create_step(uuid):

      try:

-         job = db.session.query(Job).filter(Job.uuid == uuid).one()

-     except orm_exc.NoResultFound:

-         return 'UUID not found', 400

+         args = RP['create_step'].parse_args()

+     except JSONBadRequest as error:

+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code

+     except HTTPException as error:

+         return jsonify(error.data), error.code

  

-     if event == 'changeAdded':

-         # FIXME ?

+     if args['duration']:

+         try:

+             args['duration'] = int(args['duration'])

+         except ValueError:

+             return jsonify({'message': 'duration must be int or None'}), 400

+ 

+     job = Job.query.filter_by(uuid=uuid).first()

+     if not job:

+         return jsonify({'message': 'Job not found'}), 404

+ 

+     try:

+         # mark last step as finished, if not done already

+         last_step = job.steps[-1]

+         if last_step.status != 'FINISHED':

+             last_step.finish()

+         db.session.add(last_step)

+     except IndexError:

          pass

  

-     elif event == 'buildStarted' and job.current_state == 'Triggered':

-         job.start()

+     step = BuildStep(args['description'], args['name'], args['outcome'])

+     step.start()

+     if args['outcome']:

+         if step.status != 'FINISHED':

+             step.finish(args['outcome'])

+         step.outcome = args['outcome']

+     if args['duration'] is not None:

+         if step.status != 'FINISHED':

+             step.finish()

+         step.t_finished = datetime.datetime.utcnow()

+         step.t_started = step.t_finished - datetime.timedelta(seconds=args['duration'])

+ 

+     job.steps.append(step)

+ 

+     db.session.add(step)

+     db.session.add(job)

+     db.session.commit()

+ 

+     return jsonify(SERIALIZE(step)), 201

+ 

+ 

+ RP['update_step'] = reqparse.RequestParser()

+ RP['update_step'].add_argument('duration', default=None, location='json')

+ RP['update_step'].add_argument('outcome', default='COMPLETED', location='json')

+ 

+ @main.route('/api/v1/jobs/<uuid>/steps/<id>', methods=['POST'])

+ def update_step(uuid, id):

+     try:

+         args = RP['update_step'].parse_args()

+     except JSONBadRequest as error:

+         return jsonify({"message": "Malformed Request: %s" % error.data['message']}), error.code

+     except HTTPException as error:

+         return jsonify(error.data), error.code

  

-         job.taskname = properties['taskname']

-         job.item = properties['item']

-         job.item_type = properties['item_type']

-         job.arch = properties['arch']

-         job.slavename = properties['slavename']

-         job.link_build_log = '/builders/%s/builds/%s' % (

-             data['payload']['build']['builderName'],

-             properties['buildnumber'])

- 

-         db.session.add(job)

- 

-         # add 'empty' steps for the build (since we know them already)

- #        app.logger.debug("%s: %s" % (uuid, data['payload']['build']['steps']))

- #        app.logger.debug("%s - Build Started" % uuid)

-         for step_info in data['payload']['build']['steps']:

-             #            app.logger.debug("%s -- adding step %s"% (uuid, step_info['name']))

-             step = BuildStep(name=step_info['name'])

-             step.job = job

-             db.session.add(step)

- 

-         db.session.commit()

- 

-     elif event == 'stepStarted' and job.current_state == 'Running':

-         step_info = data['payload']['step']

- #        app.logger.debug("%s - Step Started -  %s"% (uuid, step_info['name']))

+     if args['duration']:

          try:

-             step = job.get_build_step(step_info['name'])

-         except KeyError:

-             app.logger.debug("Job %s had missing step %s", job.uuid, step_info)

-             step = BuildStep(name=step_info['name'])

-             step.job = job

- 

-         step.start()

-         step.status = 'INPROGRESS'

-         step.data = json.dumps(data['payload'])  # FIXME - store sensible subset of data

-         db.session.add(step)

-         db.session.commit()

- #        app.logger.debug("%s - Step Started -  %s - written to db"% (uuid, step_info['name']))

- 

-     elif event == 'stepFinished' and job.current_state == 'Running':

-         step_info = data['payload']['step']

- #        app.logger.debug("%s - Step Finished -  %s"% (uuid, step_info['name']))

+             args['duration'] = int(args['duration'])

+         except ValueError:

+             return jsonify({'message': 'duration must be int or None'}), 400

+ 

+     job = Job.query.filter_by(uuid=uuid).first()

+     if not job:

+         return jsonify({'message': 'Job not found'}), 404

+ 

+     if id == 'last':

          try:

-             step = job.get_build_step(step_info['name'])

-         except KeyError:

-             return 'StepFinished received for non-existing step: %r' % step_info['name'], 400

+             step = job.steps[-1]

+         except IndexError:

+             return jsonify({'message': "Step not found"}), 404

+     else:

+         try:

+             id = int(id)

+         except ValueError:

+             return jsonify({'message': "id %r could not be converted to int" % id}), 400

+         try:

+             step = job.steps[id]

+         except IndexError:

+             return jsonify({'message': "Step not found"}), 404

  

-         step.finish()

+     if args['outcome'] and args['outcome'] not in EXEC_OUTCOME:

+         return jsonify({'message': "outcome %r not one of %r" % (args['outcome'], EXEC_OUTCOME,)}), 400

  

-         step.status = 'OK'

-         # results key is only present for non-ok results

-         if 'results' in step_info.keys():

-             step.status = 'NOT OK'

-         step.data = json.dumps(data['payload'])  # FIXME - store sensible subset of data

+     if args['outcome']:

+         if step.status != 'FINISHED':

+             step.finish(args['outcome'])

+         step.outcome = args['outcome']

  

-         db.session.add(step)

-         db.session.commit()

- #        app.logger.debug("%s - Step Finished -  %s - written to db" % (uuid, step_info['name']))

+     if args['duration'] is not None:

+         if step.status != 'FINISHED':

+             step.finish()

+         step.t_finished = datetime.datetime.utcnow()

+         step.t_started = step.t_finished - datetime.timedelta(seconds=args['duration'])

  

-     elif event == 'buildFinished' and job.current_state == 'Running':

-         job.finish()

-         db.session.add(job)

-         db.session.commit()

- #        app.logger.debug("%s - Build Finished " % uuid)

+     db.session.add(step)

+     db.session.add(job)

+     db.session.commit()

  

+     return jsonify(SERIALIZE(step))

  

- @main.route('/buildbottest', methods=['POST'])

- def bb_push():

-     """

-     Receives the post-push notifications from buildbot and fills in

-     the steps for the job.

-     """

-     # data are embedded in form field 'packets'

-     data = request.form

+ 

+ # =============================================================================

+ #                                   FRONTEND

+ # =============================================================================

+ 

+ RP['index'] = reqparse.RequestParser()

+ RP['index'].add_argument('page', default=0, type=int, location='args')

+ RP['index'].add_argument('limit', default=QUERY_LIMIT, type=int, location='args')

+ 

+ @main.route('/')

+ @main.route('/index')

+ @main.route('/jobs', methods=['GET'])

+ def index():

      try:

-         data = request.form['packets']

-     except werkzeug.exceptions.BadRequestKeyError:

-         return 'Field `packets` missing in request form.', 400

-     data = json.loads(data)

- 

-     # app.logger.debug(pformat(data))

- 

-     # multiple messages may be present in one 'packet'

-     for entry in data:

-         process_event(entry)

- #        app.logger.debug("%s %s, %s", entry['id'], entry['event'], process_event(entry))

- 

-     # plain 200 code needs to be returned - otherwise buildbot is

-     # endlessly trying to re-send the message.

-     # FIXME - add logging for non-200 responses

-     return '', 200

+         args = RP['index'].parse_args()

+     except JSONBadRequest as error:

+         return jsonify({"message": "Bad Request"}), error.code

+     except HTTPException as error:

+         return jsonify(error.data), error.code

+ 

+     query = db.session.query(Job).order_by(desc(Job.t_triggered))

+ 

+     query = pagination(query, args['page'], args['limit'])

+     data, prev, next = prev_next_urls(query.all(), args['limit'])

+ 

+     jobs = [SERIALIZE(d) for d in data]

+ 

+     return render_template('index.html',

+                            jobs=jobs,

+                            prev=prev,

+                            next=next)

+ 

+ 

+ @main.route('/jobs/<uuid>', methods=['GET'])

+ def show_job(uuid):

+     try:

+         job = db.session.query(Job).filter(Job.uuid == uuid).one()

+     except orm_exc.NoResultFound:

+         return 'UUID not found', 404

+ 

+     job = SERIALIZE(job)

+ 

+     return render_template('show_job.html',

+                            job=job,

+                            resultsdb_url=RESULTSDB_URL,

+                            artifacts_base_url=app.config['ARTIFACTS_BASE_URL'])

+ 

file modified
+65 -58
@@ -18,123 +18,130 @@ 

  #    Josef Skladanka <jskladan@redhat.com>

  

  from execdb import db, app

+ from execdb.serializers import DBSerialize

  

  import datetime

  import uuid

  

  

- class Job(db.Model):

+ EXEC_OUTCOME = ('COMPLETED', 'FAILED', 'ABORTED', 'CRASHED')

+ 

+ class Job(db.Model, DBSerialize):

      id = db.Column(db.Integer, primary_key=True)

      uuid = db.Column(db.String(36), unique=True)

  

      t_triggered = db.Column(db.DateTime, default=datetime.datetime.utcnow)

-     t_build_started = db.Column(db.DateTime)

-     t_build_ended = db.Column(db.DateTime)

- 

-     fedmsg_data = db.Column(db.Text)

+     t_started = db.Column(db.DateTime)

+     t_finished = db.Column(db.DateTime)

  

      taskname = db.Column(db.Text)

      item = db.Column(db.Text)

-     item_type = db.Column(db.String(20))

-     arch = db.Column(db.String(10))

- 

-     slavename = db.Column(db.String(20))

-     # slave_env_details = db.Column(db.Text) # ???

- 

-     # task_git_hash = db.Column(db.String(40)) # ???

+     item_type = db.Column(db.Text)

  

-     # link_machine_log = db.Column(db.Text) # ???

-     link_build_log = db.Column(db.Text)  # $buildbot_url/builders/$builder_name/builds/$build_id

-     # link_resultsdb = db.Column(db.Text) # Should be UUID-based

+     outcome = db.Column(db.Enum(*EXEC_OUTCOME, name='execoutcome'))

+     ref_url = db.Column(db.Text)

  

-     build_steps = db.relation('BuildStep', backref='job', order_by="BuildStep.id")

+     steps = db.relation('BuildStep', backref='job', order_by="BuildStep.id")

  

      __table_args__ = (

          db.Index('job_idx_item', 'item',

                   postgresql_ops={'item': 'text_pattern_ops'},

                   ),

+         db.Index('job_idx_type', 'item_type',

+                  postgresql_ops={'item_type': 'text_pattern_ops'},

+                  ),

          db.Index('job_idx_taskname', 'taskname',

                   postgresql_ops={'taskname': 'text_pattern_ops'},

                   ),

      )

  

-     def __init__(self, fedmsg_data=None):

+     def __init__(self, taskname=None, item=None, item_type=None, ref_url=None):

          self.uuid = str(uuid.uuid1())

-         self.fedmsg_data = fedmsg_data

+         self.taskname = taskname

+         self.item = item

+         self.item_type = item_type

+         self.ref_url = ref_url

  

      def start(self):

-         self.t_build_started = datetime.datetime.utcnow()

+         self.t_started = datetime.datetime.utcnow()

  

-     def finish(self):

-         self.t_build_ended = datetime.datetime.utcnow()

+     def finish(self, outcome="COMPLETE"):

+         self.t_finished = datetime.datetime.utcnow()

+         if outcome == '_COMPUTED_':

+             outcome = 'COMPLETED'

+             for step in self.steps:

+                 if EXEC_OUTCOME.index(step.outcome) > EXEC_OUTCOME.index(outcome):

+                     outcome = step.outcome

+         self.outcome = outcome

  

      @property

-     def current_state(self):

-         if self.t_build_started and not self.t_build_ended:

-             return "Running"

-         if self.t_build_ended:

-             return "Finished"

-         return "Triggered"

+     def status(self):

+         if self.t_started and not self.t_finished:

+             return "RUNNING"

+         if self.t_finished:

+             return "FINISHED"

+         return "SCHEDULED"

  

      @property

      def started_after(self):

          try:

-             return self.t_build_started - self.t_triggered

+             return self.t_started - self.t_triggered

          except TypeError:

              return None

  

      @property

-     def build_took(self):

+     def duration(self):

          try:

-             return self.t_build_ended - self.t_build_started

+             return self.t_finished - self.t_started

          except TypeError:

-             return None

- 

-     def get_build_step(self, name):

-         for step in self.build_steps:

-             if step.name == name:

-                 return step

-         raise KeyError("Step %r not found" % name)

+             try:

+                 return datetime.datetime.utcnow() - self.t_started

+             except TypeError:

+                 return datetime.timedelta()

  

  

- class BuildStep(db.Model):

+ class BuildStep(db.Model, DBSerialize):

      id = db.Column(db.Integer, primary_key=True)

      job_id = db.Column(db.Integer, db.ForeignKey('job.id'))

-     name = db.Column(db.String(20))

-     status = db.Column(db.String(10))

-     started_at = db.Column(db.DateTime)

-     finished_at = db.Column(db.DateTime)

-     data = db.Column(db.Text)

+     name = db.Column(db.Text)

+     description = db.Column(db.Text)

+     outcome = db.Column(db.Enum(*EXEC_OUTCOME, name='execoutcome'))

+     t_started = db.Column(db.DateTime, default=datetime.datetime.utcnow)

+     t_finished = db.Column(db.DateTime)

  

      __table_args__ = (

-         db.Index('buildstep_idx_status', 'status',

-                  postgresql_ops={'status': 'text_pattern_ops'},

-                  ),

          db.Index('buildstep_fk_job_id', 'job_id'),

      )

  

      def __repr__(self):

-         return "(%s, %s)" % (self.name, self.status)

+         return "(%s, %s, %s)" % (self.name, self.description, self.status)

  

-     def __init__(self, name):

+     def __init__(self, description=None, name=None, outcome=None):

          self.name = name

+         self.description = description

+         self.outcome = outcome

  

      def start(self):

-         self.started_at = datetime.datetime.utcnow()

+         self.t_started = datetime.datetime.utcnow()

  

-     def finish(self):

-         self.finished_at = datetime.datetime.utcnow()

+     def finish(self, outcome="COMPLETED"):

+         self.t_finished = datetime.datetime.utcnow()

+         self.outcome = outcome

  

      @property

-     def step_took(self):

+     def status(self):

+         if self.t_started and not self.t_finished:

+             return "RUNNING"

+         if self.t_finished:

+             return "FINISHED"

+         return "SCHEDULED"

+ 

+     @property

+     def duration(self):

          try:

-             return self.finished_at - self.started_at

+             return self.t_finished - self.t_started

          except TypeError:

              try:

-                 return datetime.datetime.utcnow() - self.started_at

+                 return datetime.datetime.utcnow() - self.t_started

              except TypeError:

                  return datetime.timedelta()

- 

-     @property

-     def log_url(self):

-         return "%s/steps/%s" % (self.job.link_build_log, self.name)

@@ -0,0 +1,70 @@ 

+ # Copyright 2013, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ from datetime import date, datetime, timedelta

+ 

+ 

+ class DBSerialize(object):

+     pass

+ 

+ 

+ class BaseSerializer(object):

+ 

+     def serialize(self, value, **kwargs):

+         # serialize the database objects

+         #   the specific serializer needs to implement serialize_CLASSNAME methods

+         if DBSerialize in value.__class__.__bases__:

+             return getattr(self, '_serialize_%s' % value.__class__.__name__)(value, **kwargs)

+ 

+         if type(value) == timedelta:

+             ret = []

+             s = value.seconds

+             hours = s // 3600

+             s -= hours*3600

+             if hours:

+                 ret.append('%s hours' % hours)

+ 

+             minutes = s // 60

+             s -= minutes*60

+             if minutes:

+                 ret.append('%s minutes' % minutes)

+ 

+             ret.append('%s second%s' % (s, '' if s == 1 else 's'))

+             return ' '.join(ret)

+ 

+ 

+ 

+         # convert datetimes to the right format

+         if type(value) in (datetime, date):

+             return value.isoformat()

+ 

+         if isinstance(value, dict):

+             ret = {}

+             for k, v in value.iteritems():

+                 ret[k] = self.serialize(v, **kwargs)

+             return ret

+ 

+         # convert iterables to list of serialized stuff

+         if hasattr(value, '__iter__'):

+             ret = []

+             for v in value:

+                 ret.append(self.serialize(v, **kwargs))

+             return ret

+ 

+         return value

@@ -0,0 +1,60 @@ 

+ # Copyright 2013, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ from flask import url_for

+ from execdb.serializers import BaseSerializer

+ 

+ 

+ class Serializer(BaseSerializer):

+ 

+     def _serialize_Job(self, o, **kwargs):

+         rv = dict(

+             uuid=o.uuid,

+             t_triggered=o.t_triggered,

+             t_started=o.t_started,

+             t_finished=o.t_finished,

+             taskname=o.taskname,

+             item=o.item,

+             item_type=o.item_type,

+             outcome=o.outcome,

+             ref_url=o.ref_url,

+             status=o.status,

+             duration=o.duration,

+             steps=o.steps,

+ 

+             href=url_for('main.get_job', uuid=o.uuid, _external=True),

+         )

+ 

+         return {key: self.serialize(value) for key, value in rv.iteritems()}

+ 

+     def _serialize_BuildStep(self, o, **kwargs):

+         rv = dict(

+             id=o.job.steps.index(o),

+             name=o.name,

+             description=o.description,

+             outcome=o.outcome,

+             t_started=o.t_started,

+             t_finished=o.t_finished,

+             status=o.status,

+             duration=o.duration,

+         )

+ 

+         return {key: self.serialize(value) for key, value in rv.iteritems()}

+ 

+ 

file modified
+8 -9
@@ -7,7 +7,7 @@ 

      <th>Job</th>

      <th>Item</th>

      <th>State</th>

-     <th>Build Steps</th>

+     <th>Progress</th>

      <th>Moar</th>

    </thead>

    <tbody>
@@ -17,16 +17,15 @@ 

      {{job.taskname}}

    </td>

    <td>{{job.item|truncate(50, True)}}</td>

-   <td>{{job.current_state}}</td>

+   <td>{{job.status}}</td>

    <td>

-     {% for step in job.build_steps %}

-     {#      <a href="{{buildbot_url}}{{step.log_url}}"> #}

-         <span title="{{step.name}}"

-         {% if step.status == 'INPROGRESS' %}

+     {% for step in job.steps %}

+         <span title="{{step.name}} - {{step.description}}"

+         {% if step.status == 'RUNNING' %}

            class="glyphicon glyphicon-cog"

-         {% elif step.status == 'OK' %}

+         {% elif step.outcome == 'COMPLETED' %}

            class="glyphicon glyphicon-ok"

-         {% elif step.status == 'NOT OK' %}

+         {% elif step.outcome in ['FAILED', 'CRASHED', 'ABORTED'] %}

            style="color:#B80000" class="glyphicon glyphicon-remove"

          {% else %}

            class="glyphicon glyphicon-minus"
@@ -34,7 +33,7 @@ 

          ></span>

        {#      </a> #}

      {% endfor %}

-     <a style="float:right" title="Build Logs" href="{{buildbot_url}}{{job.link_build_log}}">

+     <a style="float:right" title="Execution Logs" href="{{job.ref_url}}">

        <span class="glyphicon glyphicon-new-window"></span>

      </a>

    </td>

file modified
+27 -148
@@ -7,44 +7,50 @@ 

  }

  

  function show_data(){

-   $.get("/jobs/{{job.uuid}}/steps", function(data){

+   $.get(window.location.pathname.replace(/\/jobs\//, '/api/v1/jobs/'), function(data){

      console.log(data);

      $('#job_progress').empty();

-     var computed_job_status = "OK";

      data.steps.forEach(function(value){

        var step_url = '<a href="'+data.buildbot_url+value.log_url+'"><i class="fa fa-external-link"></i></a>';

        var icon = '';

        var row_bgcolor = 'none';

-       if (value.status == 'INPROGRESS'){

+       if (value.status == 'RUNNING'){

          icon = '<i class="fa fa-spinner fa-pulse fa-fw"></i><span class="sr-only">In progress</span>';

          row_bgcolor = '#577591';

        }

-       else if (value.status == 'OK'){

+       else if (value.outcome == 'COMPLETED'){

          icon = '<i class="fa fa-check fa-fw"></i><span class="sr-only">OK</span>';

          row_bgcolor = '#639a81';

        }

-       else if (value.status == 'NOT OK'){

+       else {

          icon = '<i class="fa fa-times fa-fw"></i><span class="sr-only">Not OK</span>';

-         computed_job_status = "NOT OK";

          row_bgcolor = '#cc6168';

        }

-       var field = '<div class="row status" style="padding:15px 0px 15px 0px;background-color: '+row_bgcolor+';"><div class="col-sm-8 text-left">'+icon+'&nbsp;'+value.description+'</div><div class="col-sm-4 text-right">'+value.duration+'</div></div>';

+       var field = '<div class="row status" style="padding:15px 0px 15px 0px;background-color: '+row_bgcolor+';"><div class="col-sm-8 text-left">'+icon+'&nbsp;'+value.name+' - '+value.description+'</div><div class="col-sm-4 text-right">'+value.duration+'</div></div>';

        $('#job_progress').append(field);

      });

      var job_icon = '';

-     var job_duration = ''+data.job_duration;

-     if (data.job_status == 'Triggered'){

+     var job_duration = data.duration;

+     var job_logs = '<span style="color" rgb(232, 232, 232)">Logs</span>';

+     var job_artifacts = '<span style="color" rgb(232, 232, 232)">Artifacts</span>';

+     if (data.ref_url){

+       job_logs = '<a style="color: rgb(232, 232, 232)" href="'+data.ref_url+'console">Logs &rarr;</a>'

+     }

+     if (data.ref_url && data.status == 'FINISHED'){

+       job_artifacts = '<a style="color: rgb(232, 232, 232)" href="'+data.ref_url+'artifact">Artifacts &rarr;</a>'

+     }

+     if (data.status == 'SCHEDULED'){

        job_duration = 'Scheduled to run soon';

        job_icon = '<i class="fa fa-calendar fa-fw"></i>';

        $('#data_envelope').css({'background-color':'#535452'});

      }

-     if (data.job_status == 'Running'){

-       job_duration = 'The job is running';

+     if (data.status == 'RUNNING'){

+       job_duration = data.duration;

        job_icon = '<i class="fa fa-spinner fa-pulse fa-fw"></i>';

        $('#data_envelope').css({'background-color':'#577591'});

      }

-     if (data.job_status == 'Finished'){

-       if (computed_job_status == 'OK'){

+     if (data.status == 'FINISHED'){

+       if (data.outcome == 'COMPLETED'){

          //job_icon = '<i class="fa fa-check fa-fw text-green"></i>';

          job_icon = '<i class="fa fa-check fa-fw"></i>';

          $('#data_envelope').css({'background-color':'#639a81'});
@@ -57,9 +63,11 @@ 

      }

      $('#job_status').html(job_icon);

      $('#job_duration').text(job_duration);

+     $('#job_logs').html(job_logs);

+     $('#job_artifacts').html(job_artifacts);

  

-     if (data.job_status != 'Finished'){

-       setTimeout(show_data, 5000);

+     if (data.status != 'FINISHED'){

+       setTimeout(show_data, 1000);

      }

      else

      {
@@ -99,21 +107,15 @@ 

          <div class="col-sm-1">Type</div>

          <div class="col-sm-6">{{job.item_type}}</div>

          <div class="col-sm-1 text-right"><i title="Run time" class="fa fa-clock-o" aria-hidden="true"></i></div>

-         <div class="col-sm-4 text-left" id="job_duration"></div>

-       </div>

-       <div class="row">

-         <div class="col-sm-1">Arch</div>

-         <div class="col-sm-6">{{job.arch}}</div>

-         <div class="col-sm-1 text-right"></div>

-         <div class="col-sm-4 text-left"></div>

+         <div class="col-sm-4 text-left" id="job_duration">{{job.duration}}</div>

        </div>

      </div>

    </div>

    <div class="row" style="padding-top: 2em">

      <div class="col-sm-3 text-center">Progress</div>

-     <div class="col-sm-3 text-center"><a style="color: rgb(232, 232, 232)" href="{{buildbot_url}}{{job.link_build_log}}">Logs &rarr;</a></div>

-     <div class="col-sm-3 text-center"><a style="color: rgb(232, 232, 232)" href="{{artifacts_base_url}}/{{job.uuid}}">Artifacts &rarr;</a></div>

-     <div class="col-sm-3 text-center"><a style="color: rgb(232, 232, 232)" href="{{resultsdb_url}}/jobs/{{job.uuid}}">ResultsDB &rarr;</a></div>

+     <div class="col-sm-3 text-center" id="job_logs"><a style="color: rgb(232, 232, 232)" href="{{job.ref_url}}console">Logs &rarr;</a></div>

+     <div class="col-sm-3 text-center" id="job_artifacts"><a style="color: rgb(232, 232, 232)" href="{{job.ref_url}}artifact">Artifacts &rarr;</a></div>

+     <div class="col-sm-3 text-center"><a style="color: rgb(232, 232, 232)" href="{{resultsdb_url}}/results?groups={{job.uuid}}">ResultsDB &rarr;</a></div>

    </div>

  </div>

  <br />
@@ -121,127 +123,4 @@ 

  <div class="container" id="job_progress" style="color: #e8e8e8;">

  </div>

  

- {#

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- <br />

- <br />

- <br />

- <br />

- <br />

- <br />

- <br />

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- 

- <h3>The job is <strong id="job_status">{{job.current_state}}</strong></h3>

- <div class="container">

-   <div class="row">

-     <div class="status_block" id="job_progress"></div>

-   </div>

- </div>

- 

- <h3>Details</h3>

- 

- <div class="container">

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Taskname</strong></div>

-   <div class="col-sm-9">{{job.taskname}}</div>

- </div>

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Item</strong></div>

-   <div class="col-sm-9">{{job.item}}</div>

- </div>

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Item type</strong></div>

-   <div class="col-sm-9">{{job.item_type}}</div>

- </div>

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Arch</strong></div>

-   <div class="col-sm-9">{{job.arch}}</div>

- </div>

- 

- <br />

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Triggered at</strong></div>

-   <div class="col-sm-9">{{job.t_triggered}}</div>

- </div>

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Started after</strong></div>

-   <div class="col-sm-9">{{job.started_after}}</div>

- </div>

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Finished in</strong></div>

-   <div class="col-sm-9">{{job.build_took}}</div>

- </div>

- 

- <br />

- 

- <div class="row">

-   <div class="col-sm-2"><strong>FedMsg data</strong></div>

-   <div class="col-sm-9">{{job.fedmsg_data}}</div>

- </div>

- <div class="row">

-   <div class="col-sm-2"><strong>Slave</strong></div>

-   <div class="col-sm-9">

-     <a href="{{buildbot_url}}/buildslaves/{{job.slavename}}">

-       {{job.slavename}}

-       <span class="glyphicon glyphicon-new-window"></span>

-     </a>

-   </div>

- </div>

- 

- <br />

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Logs</strong></div>

-   <div class="col-sm-9"><a href="{{buildbot_url}}{{job.link_build_log}}"><span class="glyphicon glyphicon-new-window"></span></a></div>

- </div>

- 

- <div class="row">

-   <div class="col-sm-2"><strong>Artifacts</strong></div>

-   <div class="col-sm-9"><a href="{{artifacts_base_url}}/{{job.uuid}}"><span class="glyphicon glyphicon-new-window"></span></a></div>

- </div>

- 

- <br />

- 

- <div class="row">

-   <div class="col-sm-2"><strong>ResultsDB</strong></div>

-   <div class="col-sm-9"><a href="{{resultsdb_url}}/groups/{{job.uuid}}"><span class="glyphicon glyphicon-new-window"></span></a></div>

- </div>

- 

- </div>

- #}

  {% endblock %}

file added
+230
@@ -0,0 +1,230 @@ 

+ # Copyright 2013, Red Hat, Inc.

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Author: Josef Skladanka <jskladan@redhat.com>

+ 

+ import requests

+ import json

+ import inspect

+ import simplejson

+ import logging

+ 

+ logger = logging.getLogger('execdb_api')

+ logger.addHandler(logging.NullHandler())

+ 

+ _KEEP = object()

+ 

+ 

+ def _fparams(expand_kwargs=True):

+     """Gets the parameters of the function, from which _fparams is called

+     and returns the list of params, minus `self`"""

+     frame = inspect.currentframe().f_back

+     args, varargs, keywords, values = inspect.getargvalues(frame)

+ 

+     params = {}

+ 

+     for key in args:

+         if key == 'self':

+             continue

+         params[key] = values[key]

+ 

+     if keywords:

+         if expand_kwargs:

+             for key, value in values[keywords].iteritems():

+                 params[key] = value

+         else:

+             params[keywords] = values[keywords]

+ 

+     # RequestParser fucks up None for strings, and converts it to u'None'...

+     cleaned_up = {}

+     for key, value in params.iteritems():

+         if value is None:

+             continue

+         cleaned_up[key] = value

+ 

+     return cleaned_up

+ 

+ 

+ class ExecDBapiException(Exception):

+ 

+     def __init__(self, message='', response=None):

+         ''':param response: :class:`requests.Response` object'''

+         self.message = message

+         self.response = response

+ 

+     def __str__(self):

+         return repr(self.message)

+ 

+ 

+ class ExecDBapi(object):

+ 

+     def __init__(self, api_url):

+         # remove trailing slash(es), so we don't generate

+         # urls with a double slash which breaks werkzeug

+         # https://github.com/mitsuhiko/werkzeug/issues/491

+         self.url = api_url.rstrip('/')

+ 

+     def __raise_on_error(self, r):

+         if r.ok:

+             return

+ 

+         try:

+             logger.warn('Received HTTP failure status code %s for request: %s',

+                         r.status_code, r.url)

+             raise ExecDBapiException(

+                 '%s (HTTP %s)' % (r.json()['message'], r.status_code), r)

+         except simplejson.JSONDecodeError as e:

+             logger.debug('Received invalid JSON data: %s\n%s', e, r.text)

+             raise ExecDBapiException(

+                 'Invalid JSON (HTTP %s): %s' % (r.status_code, e), r)

+         except KeyError:

+             raise ExecDBapiException('HTTP %s Error' % r.status_code, r)

+ 

+     def __prepare_params(self, params_all):

+         params = {}

+         for key, value in params_all.iteritems():

+             if value is None:

+                 continue

+             if key == 'raw_params':

+                 continue

+ 

+             # if a param's name ends with _like, we treat it as if :like filter should be applied

+             #  for the rare case, where it really is supposed to be the name, user should provide

+             #  it in the 'raw_params' dict

+             if key.endswith('_like'):

+                 key = "%s:like" % key[:-len('_like')]

+ 

+             if type(value) in (list, tuple):

+                 params[key] = ','.join([unicode(v) for v in value])

+             else:

+                 params[key] = unicode(value)

+ 

+         if 'raw_params' in params_all.keys() and params_all['raw_params']:

+             raw_params = {

+                 key: unicode(value) for key, value in params_all['raw_params'].iteritems()}

+             params.update(raw_params)

+         return params

+ 

+     def create_job(self, taskname, item, item_type=None, ref_url=None):

+         url = "%s/jobs" % self.url

+         headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}

+         r = requests.post(url, data=json.dumps(_fparams()), headers=headers)

+         self.__raise_on_error(r)

+ 

+         return r.json()

+ 

+     def update_job(self, uuid, start=None, ref_url=None, outcome=None, duration=None):

+         url = "%s/jobs/%s" % (self.url, uuid)

+         headers = {'content-type': 'application/json', 'accept': 'text/plain'}

+         params = _fparams()

+         del(params['uuid'])

+         r = requests.post(url, data=json.dumps(params), headers=headers)

+         self.__raise_on_error(r)

+ 

+         return r.json()

+ 

+     def get_job(self, uuid):

+         url = "%s/jobs/%s" % (self.url, uuid)

+         r = requests.get(url)

+         self.__raise_on_error(r)

+ 

+         return r.json()

+ 

+     def get_jobs(self, page=None, limit=None):

+         url = "%s/jobs" % self.url

+         r = requests.get(url, params=self.__prepare_params(_fparams()))

+         self.__raise_on_error(r)

+ 

+         return r.json()

+ 

+     def create_step(self, job_uuid, description, name=None, outcome=None, duration=None):

+         url = "%s/jobs/%s/steps" % (self.url, job_uuid)

+         data = _fparams()

+         del(data['job_uuid'])

+         headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}

+         r = requests.post(url, data=json.dumps(data), headers=headers)

+         self.__raise_on_error(r)

+ 

+         return r.json()

+ 

+     def update_step(self, job_uuid, step_id='last', outcome=None, duration=None):

+         url = "%s/jobs/%s/steps/%s" % (self.url, job_uuid, step_id)

+         headers = {'content-type': 'application/json', 'accept': 'text/plain'}

+         data = _fparams()

+         del(data['job_uuid'])

+         del(data['step_id'])

+         r = requests.post(url, data=json.dumps(data), headers=headers)

+         self.__raise_on_error(r)

+ 

+         return r.json()

+ 

+ 

+ if __name__ == '__main__':

+     import time

+     import datetime

+     import random

+ 

+     dsc_map = {

+         "rm_tmp": "Cleaning-up temporary files",

+         "rm_log": "Cleaning-up log files",

+         "git": "Cloning the task repository",

+         "runtask": "Running the testsuite",

+         "MasterShellCommand": "Creating artifacts directory on master",

+         "upload": "Uploading the artifacts to master",

+         "MasterShellCommand_1": "Compressing artifacts",

+         "MasterShellCommand_2": "Publishing the artifacts",

+         }

+ 

+     api = ExecDBapi('http://localhost:5003/api/v1')

+ 

+     # Create job

+     job = api.create_job('rpmgrill', 'libtaskotron-0.4.18-1.fc25', 'koji_build')

+     time.sleep(5)

+     # Start job

+     api.update_job(job['uuid'], start=True, ref_url="http://example.com")

+     time.sleep(3)

+ 

+     # Test step autofinishing

+     for step_name, sleep_time in (('rm_tmp', 2), ('git', 8)):

+         step = api.create_step(job['uuid'], dsc_map[step_name], step_name)

+         time.sleep(sleep_time)

+     # Test manual finish

+     api.update_step(job['uuid'], step['id'], outcome="FAILED")

+ 

+     # Test duration

+     step_name = 'runtask'

+     time.sleep(17)

+     step = api.create_step(job['uuid'], dsc_map[step_name], step_name, duration=17, outcome="COMPLETED")

+ 

+     step_name = 'MasterShellCommand'

+     step = api.create_step(job['uuid'], dsc_map[step_name], step_name)

+     time.sleep(3)

+     api.update_step(job['uuid'], step['id'], duration=1)

+ 

+     # Test 'last' as step id

+     step_name = 'upload'

+     step = api.create_step(job['uuid'], dsc_map[step_name], step_name)

+     time.sleep(6)

+     api.update_step(job['uuid'], 'last', outcome='ABORTED')

+ 

+     # Do the ret of the steps

+     for step_name, sleep_time in (('MasterShellCommand_1', 3), ('MasterShellCommand_2', 5)):

+         step = api.create_step(job['uuid'], dsc_map[step_name], step_name)

+         time.sleep(sleep_time)

+ 

+     # Finish Job

+     api.update_job(job['uuid'], outcome='_COMPUTED_')

+ 

Instead of consuming the Buildbot push notification stream, ExecDB provides a comprehensive API to

create new job (returns uuid)
update job state (running, finished, ...)
create and update steps (set to finished, duration, outcome)

The default behavior tries to make stuff as reasonable as possible:

creating a new step marks the previous one as successfully completed (if not already marked as finished)
    so does setting the job to finished
on top of setting the job's outcome to a specific value, there is a _COMPUTED_ special value that infers the outcome based on the steps

UI still updates dynamically in the job detail view.

I'd like to discuss some of the implementation details

Does it make sense to have more than COMPLETED and FAILED outcomes initially?
Should status and outcome be separated, as they are now (not in DB, but in the user-facing objects)?
    My idea here is that status would be able to be SCHEDULED and RUNNING as now, but instead of FINISHED it would return the outcome value (COMPLETED, FAILED, ...)
    The current state needs you to 'care about' two different values, but the more I think about it, the less sense it makes to have that separation
Does it make sense to store the item and/or item_type? I see some value there to be able to search for "did any jobs run on this stuff, without reporting results" so this is why I kept it so far. But I guess we can revisit this once we decide on the way to solve passing 'random arguments' to libtaskotron.

UNFINISHED:

API documentation
search over jobs (name, item, state, ...)

Read the whole conversation at: https://fedorapeople.org/groups/qa/phabarchive/differentials/phab.qa.fedoraproject.org/D1150.html

rebased onto ef32b24

6 years ago

rebased onto 987cc8e

6 years ago