Commit c54ac0a5 authored by Paul McCarthy

RF: re-arrange python utils a little

parent 466bb5ec
......@@ -117,7 +117,7 @@ build-conda-package:
PREBUILD: ""
script:
- bash /ci_rules/scripts/build_conda_package.py
- python3 /ci_rules/scripts/build_conda_package.py
./conda_build "$PROJECT_REF" "$STAGING" "$PREBUILD"
# We propagate the variables on
......
#!/usr/bin/env bash
#
# Build a conda package from a FSL recipe repository.
set -e
# This script must be passed the following arguments:
#
# - Output directory to store built conda package
#
# - Name of the git ref (e.g. tag, branch) to build the
# recipe from. If empty, ref specified in the recipe
# meta.yaml file is used.
#
# - An empty or non-empty value to indicate which
# FSL conda channel URL to download dependencies from.
# If empty, the FSLCONDA_PRODUCTION_CHANNEL_URL is
# used, otherwise the FSLCONDA_STAGING_CHANNEL_URL is
# used.
#
# (See rules/fsl-ci-conda-rules.yml)
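#
# Illustrative invocation (argument values are hypothetical - in CI the
# arguments are supplied by the job defined in the rules file above):
#
#   build_conda_package.sh ./conda_build v1.2.3 ""
#
# This would build the recipe at tag v1.2.3 and place the built package
# in ./conda_build, pulling dependencies from the production channel
# (the third argument is empty).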
output_dir=$1
project_ref=$2
staging=$3
if [[ -z "$project_ref" ]]; then
unset FSLCONDA_REVISION
else
export FSLCONDA_REVISION=$project_ref
fi
if [[ -z $staging ]]; then
channel_url="$FSLCONDA_PRODUCTION_CHANNEL_URL"
else
channel_url="$FSLCONDA_STAGING_CHANNEL_URL"
fi
echo "************************************"
echo "Building conda recipe for: $CI_PROJECT_NAME"
echo "Recipe URL: $CI_PROJECT_URL"
echo "Revision (empty means to build the release"
echo " specified in meta.yaml): $FSLCONDA_REVISION"
echo "FSL conda channel URL: $channel_url"
echo "************************************"
conda install -y -c conda-forge conda-build
conda build \
-c conda-forge \
-c defaults \
-c $channel_url \
--output-folder=$output_dir \
./
......@@ -11,9 +11,9 @@ import os
import sys
import time
from fsl_ci_utils import (get_recipe_url,
trigger_pipeline,
get_pipeline_status)
from fsl_ci_utils.conda import get_recipe_url
from fsl_ci_utils.conda_api import (trigger_pipeline,
get_pipeline_status)
GITLAB_URL = os.environ['CI_SERVER_URL']
......
......@@ -12,14 +12,12 @@
import os
import re
import sys
import urllib.parse as urlparse
from fsl_ci_utils import (get_recipe_url,
tempdir,
http_request,
sprun,
lookup_project_id,
open_merge_request)
from fsl_ci_utils import tempdir, sprun
from fsl_ci_utils.conda import get_recipe_url
from fsl_ci_utils.conda_api import (http_request,
lookup_project_id,
open_merge_request)
GITLAB_URL = os.environ['CI_SERVER_URL']
......@@ -100,7 +98,7 @@ def patch_recipe(metayaml, newversion):
f.write('\n'.join(meta))
def checkout_and_patch_recipe(url, branch, project_version):
def checkout_and_patch_recipe(url, branch, project_version):
with tempdir():
sprun(f'git clone {url} recipe')
......
......@@ -15,18 +15,18 @@ import urllib.error as urlerror
import yaml
from fsl_ci_utils import (get_project_metadata,
set_project_metadata,
list_project_branches,
download_file,
update_file,
find_suitable_runners,
enable_runner,
get_runner_metadata,
create_or_update_variable,
is_standard_recipe_path,
create_branch,
open_merge_request)
from fsl_ci_utils.conda_api import (get_project_metadata,
set_project_metadata,
list_project_branches,
download_file,
update_file,
find_suitable_runners,
enable_runner,
get_runner_metadata,
create_or_update_variable,
create_branch,
open_merge_request)
from fsl_ci_utils.conda import is_standard_recipe_path
SERVER_URL = 'https://git.fmrib.ox.ac.uk'
......@@ -97,7 +97,7 @@ def patch_gitlab_ci_yml(project_path, server, token, ci_path):
contents = yaml.load(f, Loader=yaml.Loader)
stages = ['fsl-ci-pre', 'fsl-ci-build', 'fsl-ci-deploy']
stages = [s for s in stages if s not in s not in contents['stages']]
stages = [s for s in stages if s not in contents['stages']]
include = {
'project' : rulesrepo,
'file' : rulespath
......
......@@ -15,13 +15,14 @@ import argparse
import jinja2 as j2
from fsl_ci_utils import (indir,
tempdir,
http_request,
lookup_namespace_id,
sprun,
get_project_version,
gen_recipe_path)
from fsl_ci_utils import (indir,
tempdir,
sprun)
from fsl_ci_utils.conda_api import (http_request,
lookup_namespace_id,
get_project_version)
from fsl_ci_utils.conda import gen_recipe_path
import fsl_project_dependencies as fsldeps
......@@ -105,7 +106,7 @@ def create_gitlab_recipe_repository(recipe_dir, recipe_path, server, token):
http_request(url, token, data)
with indir(recipe_dir):
sprun(f'git push origin master')
sprun('git push origin master')
def get_fsl_project_dependencies(project_path, project_dir, server, token):
......
#!/usr/bin/env python
#
# __init__.py -
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
import os
import shlex
import tempfile
import contextlib as ctxlib
import subprocess as sp
import fsl_ci_utils.conda_api as api # noqa # pylint: disable=unused-import
import fsl_ci_utils.conda as conda # noqa # pylint: disable=unused-import
@ctxlib.contextmanager
def tempdir():
"""Context manager to create, and change into, a temporary directory, and
then afterwards delete it and change back to the original working
directory.
"""
with tempfile.TemporaryDirectory() as td:
prevdir = os.getcwd()
os.chdir(td)
try:
yield td
finally:
os.chdir(prevdir)
@ctxlib.contextmanager
def indir(dirname):
"""Context manager to change into a directory, and then afterwards
change back to the original working directory.
"""
prevdir = os.getcwd()
os.chdir(dirname)
try:
yield
finally:
os.chdir(prevdir)
def sprun(cmd, **kwargs):
"""Runs the given command with subprocess.run. """
print(f'Running {cmd}')
cmd = shlex.split(cmd)
return sp.run(cmd, check=True, **kwargs)
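# Illustrative usage of the helpers above (repository URL is hypothetical):
#
#     from fsl_ci_utils import tempdir, indir, sprun
#
#     with tempdir():
#         sprun('git clone https://example.com/some/project.git project')
#         with indir('project'):
#             sprun('git log -1')
#
# tempdir() deletes the temporary directory and restores the original
# working directory on exit; indir() only restores the working directory.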
#!/usr/bin/env python
#
# Functions for working with FSL conda recipe repositories, and with conda
# channels.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
import os
import re
import dataclasses
import urllib.parse as urlparse
from typing import Dict, List, Union, Tuple
import networkx as nx
import jinja2 as j2
import yaml
from fsl_ci_utils.conda_api import (http_request,)
def gen_recipe_path(project_path):
"""Generates a conda recipe project name from the given project.
If the project name has one of the following forms (case doesn't matter):
- "fsl-<project>"
- "fsl_<project>"
the recipe name will be "fsl-<project>". Otherwise, the recipe name will
be "fsl-<project_name>".
The full recipe path will be "fsl/conda/<recipe_name>"
"""
# strip namespace in case
# fsl/<project> was passed in
project_name = project_path.rsplit('/')[-1]
match = re.match(r'^(fsl[_-])?(.*)$', project_name, flags=re.IGNORECASE)
suffix = match.group(2).lower()
return f'fsl/conda/fsl-{suffix}'
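# Illustrative examples (project names are hypothetical):
#
#     gen_recipe_path('fsl/fsl_flirt')   -> 'fsl/conda/fsl-flirt'
#     gen_recipe_path('fsl/FSL-flirt')   -> 'fsl/conda/fsl-flirt'
#     gen_recipe_path('fsl/someproject') -> 'fsl/conda/fsl-someproject'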
def get_recipe_url(project_name, server) -> Dict[str, str]:
"""Figures out the URL to the conda recipe associated with the specified
FSL project.
"""
# Figure out the name of the conda recipe associated with
# this project. It defaults to "fsl-<project>", but can
# be overridden via the FSLCONDA_RECIPE_URL environment
# variable.
url = os.environ.get('FSLCONDA_RECIPE_URL')
project_name = project_name.rsplit('/')[-1]
if url is None:
url = f'{server}/fsl/conda/fsl-{project_name}.git'
path = urlparse.urlparse(url).path.replace('.git', '')[1:]
name = path.rsplit('/', 1)[-1]
return {'url' : url, 'name' : name, 'path' : path}
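# Illustrative example (assuming FSLCONDA_RECIPE_URL is unset, and a
# hypothetical project called "flirt"):
#
#     get_recipe_url('fsl/flirt', 'https://git.fmrib.ox.ac.uk')
#
# returns:
#
#     {'url'  : 'https://git.fmrib.ox.ac.uk/fsl/conda/fsl-flirt.git',
#      'name' : 'fsl-flirt',
#      'path' : 'fsl/conda/fsl-flirt'}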
def is_standard_recipe_path(project_path, recipe_path):
"""Returns True if the given recipe path follows the "standard" convention
of being "fsl/conda/fsl-<project_name>", False otherwise.
"""
project_name = project_path.rsplit('/')[-1].lower()
return recipe_path == f'fsl/conda/fsl-{project_name}'
def load_meta_yaml(filename):
"""Load a conda recipe meta.yaml file."""
env = {
'os' : os,
'load_setup_py_data' : lambda : {},
}
with open(filename, 'rt') as f:
template = j2.Template(f.read())
meta = template.render(**env)
meta = yaml.load(meta, Loader=yaml.Loader)
return meta
@dataclasses.dataclass
class Package:
name : str
"""Package base name."""
version : str
"""Latest version available (for any platform). We assume that the
same versions are available for all supported platforms.
"""
platforms : List[str]
"""List of platforms (e.g. "noarch", "osx-64") for which this package
is available.
"""
dependencies : List[Union[str, 'Package']]
"""References to all packages which this package depends on. Stored
as a reference to another Package object for other packages hosted
on the same channel, or a "package [version-constraint]" string for
packages hosted elsewhere.
"""
def read_channel_repodata(channel_url : str) -> Tuple[Dict, Dict]:
"""Loads channel and platform metadata from the channel. """
# load channel and platform metadata - the
# first gives us a list of all packages that
# are hosted on the channel, and the second
# gives us the dependencies of each package.
channeldata = http_request(f'{channel_url}/channeldata.json')
platformdata = {}
for platform in channeldata['subdirs']:
url = f'{channel_url}/{platform}/repodata.json'
platformdata[platform] = http_request(url)
return channeldata, platformdata
def load_packages(channeldata : Dict,
platformdata : Dict) -> Dict[str, Package]:
"""Creates a Package object for every package hosted in the channel. """
packages = {}
# The channeldata.json file contains an entry for
# every package hosted on the channel. We use this
# to create a Package object for every package in
# the channel, but without dependencies - we'll do
# this in a second pass below.
for name, meta in channeldata['packages'].items():
version = meta['version']
platforms = meta['subdirs']
packages[name] = Package(name, version, platforms, [])
# Load dependency information - this is stored
# in platform-specific repodata.json files.
for pkg in packages.values():
# assume that version/dependencies are
# identical across all supported platforms
platform = pkg.platforms[0]
# repodata contains one entry for every package
# file hosted in the channel (i.e. there will be
# multiple files for each package, each for a
# different version). Find the entry corresponding
# to the latest version.
print(f'Searching for {pkg.name} == {pkg.version} in {platform}...')
for fmeta in platformdata[platform]['packages'].values():
if fmeta['name'] == pkg.name and \
fmeta['version'] == pkg.version:
break
else:
raise RuntimeError(f'Cannot find package {pkg.name} in '
f'{platform} sub-directory!')
# The entry for each package file contains a list
# of "package [version-constraint]" strings.
for dep in fmeta['depends']:
name = dep.split()[0]
# Externally hosted package - store
# the full dependency string
if name not in packages:
pkg.dependencies.append(dep)
# Another package hosted in this
# channel - store a ref to the
# Package object
else:
pkg.dependencies.append(packages[name])
return packages
def build_dependency_graph(packages : Dict[str, Package]) -> nx.DiGraph:
"""Builds a directed dependency graph from the given collection of
packages. Externally hosted packages are not included in the graph.
"""
g = nx.DiGraph()
# nodes
for pkg in packages.values():
g.add_node(pkg.name)
# edges
for pkg in packages.values():
for dep in pkg.dependencies:
if isinstance(dep, Package):
g.add_edge(pkg.name, dep.name)
return g
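# Illustrative usage, chaining the functions above (channel URL and
# package name are hypothetical):
#
#     channeldata, platformdata = read_channel_repodata(
#         'https://example.com/fslconda/production')
#     packages = load_packages(channeldata, platformdata)
#     graph    = build_dependency_graph(packages)
#
#     # edges point from a package to its dependencies, so the direct
#     # dependencies of a package are its successors in the graph
#     deps = list(graph.successors('fsl-flirt'))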
#!/usr/bin/env python
#
# Functions for interacting with Gitlab over its HTTP REST API.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
import json
import datetime
import string
import base64 as b64
import functools as ft
import urllib.parse as urlparse
import urllib.request as urlrequest
def http_request(url, token=None, data=None, method=None):
"""Submit a HTTP request to the given URL. """
if method is None:
if data is None: method = 'GET'
else: method = 'POST'
print(f'{method} {url} ...')
headers = {}
if token is not None:
headers['PRIVATE-TOKEN'] = token
if data is not None:
headers['Content-Type'] = 'application/json'
data = json.dumps(data).encode('utf-8')
request = urlrequest.Request(
url, headers=headers, data=data, method=method)
response = urlrequest.urlopen(request).read()
return json.loads(response)
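# Illustrative example - an authenticated GET request against the GitLab
# projects endpoint (server and token values are placeholders):
#
#     projects = http_request(f'{server}/api/v4/projects', token=token)
#
# Passing a data dictionary switches the method to POST (unless a method
# is given explicitly) and sends the data as a JSON-encoded request body.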
@ft.lru_cache()
def lookup_project_id(project_path, server, token):
"""Look up the integer ID of a gitlab project from its fully qualified
path.
"""
project_path = urlparse.quote_plus(project_path)
url = f'{server}/api/v4/projects/{project_path}'
return http_request(url, token)['id']
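# Note: urlparse.quote_plus turns the path separator into '%2F', so a
# (hypothetical) project path such as 'fsl/conda/fsl-flirt' is sent to
# the API as 'fsl%2Fconda%2Ffsl-flirt', which is how GitLab expects
# path-based project lookups to be encoded.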
def lookup_namespace_id(namespace_path, server, token):
"""Look up the integer ID of a gitlab namespace from its fully qualified
path.
"""
url = f'{server}/api/v4/namespaces'
namespaces = http_request(url, token)
for n in namespaces:
if n['path'] == namespace_path:
return n['id']
raise ValueError(f'No namespace matching {namespace_path}')
def get_available_runners(project_path, server, token):
"""Returns a list of the IDs of all specific runners
which are available to be used for the given project,
and are not already enabled for it.
"""
# get all specific runners available to the user
url = f'{server}/api/v4/runners'
runners = http_request(url, token)
# remove inactive ones, and ones that are already
# enabled
runners = [r for r in runners if r['online'] and r['active']]
runners = [r for r in runners if not runner_is_enabled(
project_path, r['id'], server, token)]
return [r['id'] for r in runners]
def get_runner_metadata(runner_id, server, token):
"""Returns metadata for the specified runner. """
url = f'{server}/api/v4/runners/{runner_id}'
response = http_request(url, token)
return response
def get_runner_tags(runner_id, server, token):
"""Returns the list of tags for the specified runner. """
return get_runner_metadata(runner_id, server, token)['tag_list']
def find_suitable_runners(project_path, tags, server, token):
"""Identifies runners with the specified set of tags which
are available to be used on the given project.
"""
def match(runner_tags):
return all([t.lower() in runner_tags for t in tags])
rids = get_available_runners(project_path, server, token)
rtags = [get_runner_tags(r, server, token) for r in rids]
return [r for r, t in zip(rids, rtags) if match(t)]
def lookup_project_tags(project_path, server, token):
"""Return a list of tags for the given project, or an empty list
if the project has no tags.
The tags are sorted such that the most recently updated tag is first
in the list.
"""
pid = lookup_project_id(project_path, server, token)
url = f'{server}/api/v4/projects/{pid}/repository/tags'
tags = http_request(url, token)
tags = [t['name'] for t in tags]
return tags
def is_acceptable(version):
"""Return True if the given version/tag is "acceptable" - it must
be a sequence of integers, separated by periods, with an optional
leading 'v'.
"""
if version.lower().startswith('v'):
version = version[1:]
for part in version.split('.'):
if not all([c in string.digits for c in part]):
return False
return True
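# Illustrative examples:
#
#     is_acceptable('6.0.4')    -> True
#     is_acceptable('v2107.2')  -> True
#     is_acceptable('6.0.4a')   -> False   (non-numeric component)
#     is_acceptable('beta')     -> False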
def get_project_version(project_path, server, token):
"""Return the most recent version of the specified project, returning
"YYMM.0" if the project does not have any tags.
"""
tags = lookup_project_tags(project_path, server, token)
version = None
if len(tags) > 0 and is_acceptable(tags[0]):
version = tags[0]
if version is None:
version = datetime.date.today().strftime('%y%m')
version = f'{version}.0'
return version
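# For example, if the most recent tag of a (hypothetical) project is
# 'v6.0.5', it is returned unchanged; a project with no acceptable tags
# falls back to a date-based version such as '2107.0' (July 2021).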
def list_project_branches(project_path, server, token):
"""Returns a list of all branches of the project. """
pid = lookup_project_id(project_path, server, token)
url = f'{server}/api/v4/projects/{pid}/repository/branches'
response = http_request(url, token)
return [r['name'] for r in response]
def get_project_metadata(project_path, server, token):
"""Returns metadata for the specified project."""
pid = lookup_project_id(project_path, server, token)
url = f'{server}/api/v4/projects/{pid}/'
return http_request(url, token)
def set_project_metadata(project_path, server, token, data):
"""Sets metadata for the specified project."""
pid = lookup_project_id(project_path, server, token)
url = f'{server}/api/v4/projects/{pid}/'
http_request(url, token, data, method='PUT')
@ft.lru_cache()
def download_file(
project_path, filename, server, token, ref='master', text=True):
"""Download the specified file from the specified branch/ref of the project. """