#!/usr/bin/env python # # trigger_build.py - Trigger a package build and deployment on one or more FSL # conda recipe repositories. # # Author: Paul McCarthy # import sys import os.path as op import functools as ft import textwrap as tw import argparse import datetime import multiprocessing.dummy as mp from fsl_ci.recipe import get_recipe_variable from fsl_ci.platform import get_platform_ids from fsl_ci.conda import load_meta_yaml import fsl_ci.gitlab as gitlab from fsl_ci.gitlab import (trigger_job, get_variables, download_file, find_latest_job, trigger_pipeline, wait_on_pipeline) gitlab.VERBOSE = False SERVER_URL = 'https://git.fmrib.ox.ac.uk' """Default gitlab instance URL, if not specified on the command.line.""" def now(): """Returns the current time as a string. """ return datetime.datetime.now().strftime('%H:%M:%S') def get_revision(recipe_path, server, token): """Return the value of the FSLCONDA_REVISION variable on the given conda recipe repository, or None if it is not set. The returned revision is only used for staging builds. """ meta = download_file(recipe_path, 'meta.yaml', server, token) project_repo = get_recipe_variable(meta, 'repository') variables = get_variables(recipe_path, server, token) # meta.yaml not parseable if project_repo is None: return None # externally hosted project if SERVER_URL not in project_repo: return None # Build off FSLCONDA_REVISION if set, otherwise # build off master (remember this only affects # staging builds) rev = variables.get('FSLCONDA_REVISION', None) if rev is not None: return rev else: return 'master' def trigger_build(project, server, token, production): """Triggers a pipeline on the master branch of project and waits for it to complete. """ rev = get_revision(project, server, token) if production: channel = 'production' variables = {} else: channel = 'staging' variables = {'STAGING' : 'true'} if rev is not None: variables['FSLCONDA_REVISION'] = rev try: pipeline = trigger_pipeline( project, 'master', server, token, variables) except Exception: return None pid = pipeline['id'] print(f'{now()} Pipeline triggered on {project} ({channel} build) ' f'- see {pipeline["web_url"]}') try: status = wait_on_pipeline(project, pid, server, token) except Exception: return None print(f'{now()} Build pipeline for {project} has finished: {status}') if status != 'manual': return None return pid def trigger_deploy(project, pid, server, token, production): """Triggers the most recently created manual 'deploy-conda-package' job, and waits for it to complete. """ # trigger_build returns None # if the build failed if pid is None: return False # deployment to staging/production gets set at # build time, so we don't need to pass the STAGING # variable here, like we do in trigger_build if production: channel = 'production' else: channel = 'staging' meta = download_file(project, 'meta.yaml', server, token) meta = load_meta_yaml(meta) platforms = get_platform_ids(meta) print(f'{now()} Triggering deploy-{platforms}-conda-package jobs ' f'on {project} (deploying to {channel} channel)') try: jids = [] for platform in platforms: jids.extend(find_latest_job( project, server, token, f'deploy-{platform}-conda-package', 1)) for j in jids: trigger_job(project, j['id'], server, token) status = wait_on_pipeline(project, pid, server, token) print(f'{now()} Deploy job {project} has finished: {status}') return True except Exception as e: print(f'Error triggering deploy job on {project}: {e}') return False def parseArgs(argv=None): """Parses and returns command line arguments. """ name = op.basename(__file__) usage = f'Usage: {name} -t [options] project [project ...]' desc = tw.dedent(""" Trigger a package build and deployment on one or more FSL conda recipe repositories. """).strip() helps = { 'token' : 'Gitlab API access token with read+write access', 'server' : f'Gitlab server (default: {SERVER_URL})', 'project' : 'Project(s) to build', 'production' : 'Build production/stable version ' '(default: label built package as staging/development)', 'sequential' : 'Build projects sequentially (default: build in ' 'parallel', } parser = argparse.ArgumentParser(usage=usage, description=desc) parser.add_argument('project', nargs='+', help=helps['project']) parser.add_argument('-s', '--server', default=SERVER_URL, help=helps['server']) parser.add_argument('-t', '--token', required=True, help=helps['token']) parser.add_argument('-p', '--production', action='store_true', help=helps['production']) parser.add_argument('-q', '--sequential', action='store_true', help=helps['sequential']) return parser.parse_args(argv) def main(argv=None): """Trigger builds on all listed projects concurrently, and wait for them all to complete or fail. """ args = parseArgs(argv) projects = args.project build = ft.partial(trigger_build, server=args.server, token=args.token, production=args.production) deploy = ft.partial(trigger_deploy, server=args.server, token=args.token, production=args.production) result = True if args.sequential: for project in projects: pid = build(project) if not deploy(project, pid): result = False else: pool = mp.Pool(len(projects)) pids = pool.map(build, projects) result = all(pool.starmap(deploy, zip(projects, pids))) pool.close() pool.join() return 0 if result else 1 if __name__ == '__main__': sys.exit(main())