#!/usr/bin/env python
#
# trigger_build.py - Trigger a package build and deployment on one or more FSL
# conda recipe repositories.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#


import                          sys
import os.path               as op
import functools             as ft
import textwrap              as tw
import                          argparse
import                          datetime
import multiprocessing.dummy as mp

from   fsl_ci.recipe   import  get_recipe_variable
from   fsl_ci.platform import  get_platform_ids
from   fsl_ci.conda    import  load_meta_yaml
import fsl_ci.gitlab   as      gitlab
from   fsl_ci.gitlab   import (trigger_job,
                               get_variables,
                               download_file,
                               find_latest_job,
                               trigger_pipeline,
                               wait_on_pipeline)


# Suppress verbose output from the fsl_ci.gitlab helper functions
gitlab.VERBOSE = False


SERVER_URL = 'https://git.fmrib.ox.ac.uk'
"""Default gitlab instance URL, if not specified on the command.line."""


def now():
    """Returns the current time as a string. """
    return datetime.datetime.now().strftime('%H:%M:%S')


def get_revision(recipe_path, server, token):
    """Return the value of the FSLCONDA_REVISION variable on the given
    conda recipe repository, or None if it is not set.

    The returned revision is only used for staging builds.
47
    """

    meta         = download_file(recipe_path, 'meta.yaml', server, token)
    project_repo = get_recipe_variable(meta, 'repository')
    variables    = get_variables(recipe_path, server, token)

    # meta.yaml not parseable
    if project_repo is None:
        return None

    # externally hosted project
    if SERVER_URL not in project_repo:
        return None

    # Build off FSLCONDA_REVISION if set, otherwise
    # build off master (remember this only affects
    # staging builds)
    rev = variables.get('FSLCONDA_REVISION', None)

    if rev is not None: return rev
    else:               return 'master'


def trigger_build(project, server, token, production):
    """Triggers a pipeline on the master branch of project and waits for it
    to complete.
73
74
    """

    rev = get_revision(project, server, token)

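    # Production builds use the pipeline defaults. Staging
    # builds are requested by setting the STAGING variable,
    # and may build off a specific revision of the project
    # repository (see get_revision above).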
    if production:
        channel   = 'production'
        variables = {}
    else:
        channel   = 'staging'
        variables = {'STAGING' : 'true'}

        if rev is not None:
            variables['FSLCONDA_REVISION'] = rev

    try:
        pipeline = trigger_pipeline(
            project, 'master', server, token, variables)
    except Exception:
        return None

    pid = pipeline['id']

    print(f'{now()} Pipeline triggered on {project} ({channel} build) '
          f'- see {pipeline["web_url"]}')

    try:
        status = wait_on_pipeline(project, pid, server, token)
    except Exception:
        return None

    print(f'{now()} Build pipeline for {project} has finished: {status}')

    # A successful build pipeline ends up in the 'manual'
    # state, waiting for its deploy jobs to be triggered
    # (see trigger_deploy below). Any other status is
    # interpreted as a failure.
    if status != 'manual':
        return None

    return pid


def trigger_deploy(project, pid, server, token, production):
    """Triggers the most recently created manual 'deploy-conda-package' job,
    and waits for it to complete.
    """

    # trigger_build returns None
    # if the build failed
    if pid is None:
        return False

    # deployment to staging/production gets set at
    # build time, so we don't need to pass the STAGING
    # variable here, like we do in trigger_build
    if production: channel = 'production'
    else:          channel = 'staging'

    meta      = download_file(project, 'meta.yaml', server, token)
    meta      = load_meta_yaml(meta)
    platforms = get_platform_ids(meta)

    print(f'{now()} Triggering deploy-{platforms}-conda-package jobs '
          f'on {project} (deploying to {channel} channel)')

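    # Find the most recently created deploy job for each
    # platform, trigger them all, then wait for the build
    # pipeline to reach a final state.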
    try:
        jids = []
        for platform in platforms:
            jids.extend(find_latest_job(
                project, server, token, f'deploy-{platform}-conda-package', 1))
        for j in jids:
            trigger_job(project, j['id'], server, token)

        status = wait_on_pipeline(project, pid, server, token)
        print(f'{now()} Deploy jobs for {project} have finished: {status}')
        return True
    except Exception as e:
        print(f'Error triggering deploy job on {project}: {e}')
        return False


def parseArgs(argv=None):
    """Parses and returns command line arguments. """

    name   = op.basename(__file__)
    usage  = f'Usage: {name} -t <token> [options] project [project ...]'
    desc   = tw.dedent("""
    Trigger a package build and deployment on
    one or more FSL conda recipe repositories.
    """).strip()

    helps = {
        'token'      : 'Gitlab API access token with read+write access',
        'server'     : f'Gitlab server (default: {SERVER_URL})',
        'project'    : 'Project(s) to build',
        'production' : 'Build production/stable version '
                       '(default: label built package as staging/development)',
        'sequential' : 'Build projects sequentially (default: build in '
                       'parallel)',
    }

    parser = argparse.ArgumentParser(usage=usage, description=desc)
    parser.add_argument('project', nargs='+',
                        help=helps['project'])
    parser.add_argument('-s', '--server', default=SERVER_URL,
                        help=helps['server'])
    parser.add_argument('-t', '--token', required=True,
                        help=helps['token'])
    parser.add_argument('-p', '--production', action='store_true',
                        help=helps['production'])
    parser.add_argument('-q', '--sequential', action='store_true',
                        help=helps['sequential'])

    return parser.parse_args(argv)


def main(argv=None):
    """Trigger builds on all listed projects concurrently, and wait for them
    all to complete or fail.
    """

    args     = parseArgs(argv)
    projects = args.project
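    # trigger_build and trigger_deploy take the same server,
    # token, and production arguments for every project, so
    # bind them here.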
    build    = ft.partial(trigger_build,
                          server=args.server,
                          token=args.token,
                          production=args.production)
    deploy   = ft.partial(trigger_deploy,
                          server=args.server,
                          token=args.token,
                          production=args.production)
    result   = True

    if args.sequential:
        for project in projects:
            pid = build(project)
            if not deploy(project, pid):
                result = False
    else:
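        # multiprocessing.dummy.Pool is a thread-based pool -
        # builds are triggered and monitored concurrently,
        # one thread per project.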
        pool   = mp.Pool(len(projects))
        pids   = pool.map(build, projects)
        result = all(pool.starmap(deploy, zip(projects, pids)))
        pool.close()
        pool.join()

    return 0 if result else 1


if __name__ == '__main__':
    sys.exit(main())