Compare revisions
Commits on Source (1635)
Showing with 2135 additions and 298 deletions
#!/usr/bin/env bash
set -e
name=$1
version=$2
# add any extra channels that are needed
for channel in $FSL_CONDA_CHANNELS; do
    conda config --append channels $channel
done
# make sure to update fundamental
# packages from the default channel
conda update --yes -c defaults -n base conda
conda install --yes -c defaults -n base setuptools conda-build
# insert project name/version into meta.yaml
echo "{% set name = '$name' %}" > vars.txt
echo "{% set version = '$version' %}" >> vars.txt
cat vars.txt .conda/meta.yaml > tempfile
mv tempfile .conda/meta.yaml
rm vars.txt
mkdir -p dist/conda-bld
conda build --output-folder=dist/conda-bld .conda
# Make sure package is installable
for pyver in 2.7 3.4 3.5 3.6; do
    conda create -y --name "test$pyver" python=$pyver
    source activate test$pyver
    conda install -y -c file://`pwd`/dist/conda-bld $name
    source deactivate
done
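For illustration, if this script were invoked with hypothetical arguments name=fslpy and version=3.0.0, the two generated lines prepended to .conda/meta.yaml would be:

    {% set name = 'fslpy' %}
    {% set version = '3.0.0' %}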
@@ -2,5 +2,6 @@
set -e
python setup.py doc
mv doc/html doc/"$CI_COMMIT_REF_NAME"
source /test.venv/bin/activate
pip install ".[doc]"
sphinx-build doc public
@@ -2,27 +2,15 @@
set -e
pip install wheel
python setup.py sdist
python setup.py bdist_wheel
pip install --upgrade pip wheel setuptools twine build
python -m build
twine check dist/*
# do a test install from both source and wheel
sdist=`find dist -maxdepth 1 -name "*.tar.gz"`
wheel=`find dist -maxdepth 1 -name "*.whl"`
# pip < 10 will not install wheels
# with an invalid name, so, to let
# us generate builds from non-releases
# (e.g. the master branch), we hack
# the wheel file name here so that
# pip will accept it.
#
# This will no longer be necessary
# when pip 10 is available.
nwheel=`echo -n $wheel | sed -e 's/fslpy-/fslpy-0/g'`
mv $wheel $nwheel
wheel=$nwheel
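# For example, a wheel built from a hypothetical non-release
# branch would be renamed like so:
#   fslpy-master-py2.py3-none-any.whl -> fslpy-0master-py2.py3-none-any.whl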
for target in $sdist $wheel; do
    python -m venv test.venv
    . test.venv/bin/activate
......
#!/usr/bin/env bash
set -e
grep -E "^__version__ += +'$CI_COMMIT_REF_NAME' *$" fsl/version.py
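For reference, given a hypothetical tag name of 3.2.0, this check passes only if fsl/version.py contains a line of the form:

    __version__ = '3.2.0'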
#!/usr/bin/env bash
set -e
rsync -rv dist/conda-bld/ --exclude 'repodata*' --exclude "*json" "condadeploy:"
ssh condaindex
#!/usr/bin/env bash
set -e
rsync -rv doc/"$CI_COMMIT_REF_NAME" "docdeploy:"
#!/usr/bin/env bash
set -e
if [[ "x$CI_COMMIT_TAG" != "x" ]]; then
echo "Release detected - patching version - $CI_COMMIT_REF_NAME";
python -c "import fsl.version as v; v.patchVersion('fsl/version.py', '$CI_COMMIT_REF_NAME')";
fi
@@ -21,54 +21,39 @@ set -e
if [[ -f /.dockerenv ]]; then

    # We have to use different host names to connect
    # to the docker daemon host on mac as opposed
    # to on linux.
    #
    # On linux (assuming the docker job is running
    # with --net=host), we can connect via
    # username@localhost.
    #
    # On mac, we have to connect via
    # username@host.docker.internal
    if [[ "$CI_RUNNER_TAGS" == *"macOS"* ]]; then
        if [[ "$FSL_HOST" == *"@localhost" ]]; then
            FSL_HOST=${FSL_HOST/localhost/host.docker.internal}
        fi
    fi

    apt-get update -y || yum -y check-update || true;
    apt-get install -y openssh-client rsync git || yum install -y openssh-clients rsync git || true;

    eval $(ssh-agent -s);
    mkdir -p $HOME/.ssh;

    echo "$SSH_PRIVATE_KEY_GIT" > $HOME/.ssh/id_git;

    # for downloading FSL atlases/standards
    echo "$SSH_PRIVATE_KEY_FSL_DOWNLOAD" > $HOME/.ssh/id_fsl_download;

    if [[ "$CI_PROJECT_PATH" == "$UPSTREAM_PROJECT" ]]; then
        echo "$SSH_PRIVATE_KEY_DOC_DEPLOY" > $HOME/.ssh/id_doc_deploy;
        echo "$SSH_PRIVATE_KEY_CONDA_DEPLOY" > $HOME/.ssh/id_conda_deploy;
        echo "$SSH_PRIVATE_KEY_CONDA_INDEX" > $HOME/.ssh/id_conda_index;
    fi;

    chmod go-rwx $HOME/.ssh/id_*;

    ssh-add $HOME/.ssh/id_git;
    ssh-add $HOME/.ssh/id_fsl_download;

    if [[ "$CI_PROJECT_PATH" == "$UPSTREAM_PROJECT" ]]; then
        ssh-add $HOME/.ssh/id_doc_deploy;
        ssh-add $HOME/.ssh/id_conda_deploy;
    fi

    echo "$SSH_SERVER_HOSTKEYS" > $HOME/.ssh/known_hosts;
    ssh-keyscan ${FSL_HOST##*@} >> $HOME/.ssh/known_hosts;

    touch $HOME/.ssh/config;

    echo "Host ${UPSTREAM_URL##*@}" >> $HOME/.ssh/config;
    echo " User ${UPSTREAM_URL%@*}" >> $HOME/.ssh/config;
    echo " IdentityFile $HOME/.ssh/id_git" >> $HOME/.ssh/config;

    echo "Host docdeploy" >> $HOME/.ssh/config;
    echo " HostName ${DOC_HOST##*@}" >> $HOME/.ssh/config;
    echo " User ${DOC_HOST%@*}" >> $HOME/.ssh/config;
    echo " IdentityFile $HOME/.ssh/id_doc_deploy" >> $HOME/.ssh/config;

    echo "Host condadeploy" >> $HOME/.ssh/config;
    echo " HostName ${CONDA_HOST##*@}" >> $HOME/.ssh/config;
    echo " User ${CONDA_HOST%@*}" >> $HOME/.ssh/config;
    echo " IdentityFile $HOME/.ssh/id_conda_deploy" >> $HOME/.ssh/config;

    echo "Host condaindex" >> $HOME/.ssh/config;
    echo " HostName ${CONDA_HOST##*@}" >> $HOME/.ssh/config;
    echo " User ${CONDA_HOST%@*}" >> $HOME/.ssh/config;
    echo " IdentityFile $HOME/.ssh/id_conda_index" >> $HOME/.ssh/config;

    echo "Host fsldownload" >> $HOME/.ssh/config;
    echo " HostName ${FSL_HOST##*@}" >> $HOME/.ssh/config;
    echo " User ${FSL_HOST%@*}" >> $HOME/.ssh/config;
......
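For illustration, with a hypothetical DOC_HOST of someuser@doc.example.org, the echo statements above would append an entry like this to $HOME/.ssh/config (with $HOME expanded to the CI user's home directory):

    Host docdeploy
     HostName doc.example.org
     User someuser
     IdentityFile /root/.ssh/id_doc_deploy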
@@ -2,43 +2,36 @@
set -e
# If running on a fork repository, we merge in the
# upstream/master branch. This is done so that merge
# requests from a fork to the parent repository will
# have unit tests run on the merged code, something
# which gitlab CE does not currently do for us.
if [[ "$CI_PROJECT_PATH" != "$UPSTREAM_PROJECT" ]]; then
    git fetch upstream;
    git merge --no-commit --no-ff upstream/master;
fi;
source /test.venv/bin/activate
pip install --retries 10 -r requirements.txt
pip install --retries 10 -r requirements-extra.txt
pip install --retries 10 -r requirements-dev.txt
pip install ".[extra,test,style]"
# style stage
if [ "$TEST_STYLE"x != "x" ]; then pip install --retries 10 pylint flake8; fi;
if [ "$TEST_STYLE"x != "x" ]; then flake8 fsl || true; fi;
if [ "$TEST_STYLE"x != "x" ]; then pylint --output-format=colorized fsl || true; fi;
if [ "$TEST_STYLE"x != "x" ]; then exit 0; fi
if [ "$TEST_STYLE"x != "x" ]; then exit 0; fi;
# We need the FSL atlases for the atlas
# tests, and need $FSLDIR to be defined
export FSLDIR=/fsl/
mkdir -p $FSLDIR/data/
rsync -rv "fsldownload:data/atlases/" "$FSLDIR/data/atlases/"
rsync -rv "fsldownload:$FSL_ATLAS_DIR" "$FSLDIR/data/atlases/"
# Finally, run the damned tests.
# Run the tests. Suppress coverage
# reporting until after we're finished.
TEST_OPTS="--cov-report= --cov-append"
# pytest struggles with my organisation of
# the fslpy package, where all tests are in
# fsl.tests, and fsl is a namespace package
touch fsl/__init__.py
# We run some tests under xvfb-run
# because they invoke wx. Sleep in
# between, otherwise xvfb gets upset.
xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_idle.py"
xvfb-run -a pytest $TEST_OPTS fsl/tests/test_idle.py
sleep 5
xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_platform.py"
xvfb-run -a pytest $TEST_OPTS fsl/tests/test_platform.py
# We run the immv/imcp tests as the nobody
# user because some tests expect permission
@@ -47,16 +40,20 @@ xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_platform.py"
# this directory writable by anybody (which,
# unintuitively, includes nobody)
chmod -R a+w `pwd`
cmd="source /test.venv/bin/activate && python setup.py test"
cmd="$cmd --addopts='$TEST_OPTS tests/test_immv_imcp.py'"
cmd="source /test.venv/bin/activate && pytest"
cmd="$cmd $TEST_OPTS fsl/tests/test_scripts/test_immv_imcp.py fsl/tests/test_immv_imcp.py"
su -s /bin/bash -c "$cmd" nobody
# All other tests can be run as normal.
python setup.py test --addopts="$TEST_OPTS -m 'not longtest' --ignore=tests/test_idle.py --ignore=tests/test_platform.py --ignore=tests/test_immv_imcp.py"
pytest $TEST_OPTS -m 'not longtest'         \
    --ignore=fsl/tests/test_idle.py         \
    --ignore=fsl/tests/test_platform.py     \
    --ignore=fsl/tests/test_immv_imcp.py    \
    --ignore=fsl/tests/test_scripts/test_immv_imcp.py
# Long tests are only run on release branches
if [[ $CI_COMMIT_REF_NAME == v* ]]; then
    python setup.py test --addopts="$TEST_OPTS -m 'longtest'"
    pytest $TEST_OPTS -m 'longtest'
fi
python -m coverage report
python -m coverage report -i
#!/usr/bin/env python
#
# Deposit a new version of a file on Zenodo.
#
# It is assumed that a deposit already exists on Zenodo - you must
# specify the deposit ID of that original deposit.
#
# Usage: zenodo.py url token deposit_id upload_file meta_file version date
#
# http://developers.zenodo.org/#rest-api
import os.path as op
import sys
import json
import jinja2 as j2
import requests
def deposit(zenodo_url, access_token, dep_id, upload_file, meta):

    urlbase = '{}/api/deposit/depositions'.format(zenodo_url)
    headers = {'Content-Type': 'application/json'}
    params  = {'access_token' : access_token}

    # Create a new deposit
    url = '{}/{}/actions/newversion'.format(urlbase, dep_id)
    print('Creating new deposit: {}'.format(url))
    r = requests.post(url, params=params)
    if r.status_code != 201:
        raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))

    newurl = r.json()['links']['latest_draft']
    dep_id = newurl.split('/')[-1]
    print("New deposition ID: {}".format(dep_id))

    # Upload the file (opened via a context
    # manager so the handle is closed afterwards)
    data = {'filename': op.basename(upload_file)}
    url  = '{}/{}/files'.format(urlbase, dep_id)
    print('Uploading file: {}'.format(url))
    with open(upload_file, 'rb') as f:
        files = {'file': f}
        r     = requests.post(url, params=params, data=data, files=files)
    if r.status_code != 201:
        raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))

    # Upload the metadata
    url = '{}/{}?access_token={}'.format(urlbase, dep_id, access_token)
    print('Uploading metadata: {}'.format(url))
    r = requests.put(url, data=json.dumps(meta), headers=headers)
    if r.status_code != 200:
        print(r.json())
        raise RuntimeError('PUT {} failed: {}'.format(url, r.status_code))

    # Publish
    url = '{}/{}/actions/publish'.format(urlbase, dep_id)
    print('Publishing: {}'.format(url))
    r = requests.post(url, params=params)
    if r.status_code != 202:
        raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))


def make_meta(templatefile, version, date):
    with open(templatefile, 'rt') as f:
        template = f.read()

    template = j2.Template(template)
    env      = {
        'VERSION' : version,
        'DATE'    : date,
    }

    return json.loads(template.render(**env))


if __name__ == '__main__':
    zurl     = sys.argv[1]
    tkn      = sys.argv[2]
    depid    = sys.argv[3]
    upfile   = sys.argv[4]
    metafile = sys.argv[5]
    version  = sys.argv[6]
    date     = sys.argv[7]

    meta = make_meta(metafile, version, date)
    deposit(zurl, tkn, depid, upfile, meta)
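A hypothetical invocation of this script, using the Zenodo sandbox and made-up values for the token, deposit ID, file, and release:

    python zenodo.py https://sandbox.zenodo.org MY_TOKEN 123456 \
        dist/fslpy-3.0.0.tar.gz .ci/zenodo_meta.json.jinja2 3.0.0 2023-01-01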
#!/bin/bash

set -e

# absolute path to the directory containing this script
thisdir=$(cd "$(dirname "$0")" && pwd)
zenodo_url=$1
zenodo_tkn=$2
zenodo_depid=$3
version=$(grep -E '^__version__ +=' fsl/version.py |
          cut -d "=" -f 2                          |
          tr -d "'"                                |
          tr -d ' ')
upfile=$(pwd)/dist/fslpy-"$version".tar.gz
metafile=$(pwd)/.ci/zenodo_meta.json.jinja2
date=$(date +"%Y-%m-%d")
pip install --retries 10 requests jinja2
python "$thisdir"/zenodo.py \
"$zenodo_url" \
"$zenodo_tkn" \
"$zenodo_depid" \
"$upfile" \
"$metafile" \
"$version" \
"$date"
{
  "metadata" : {
    "title"            : "fslpy",
    "upload_type"      : "software",
    "version"          : "{{VERSION}}",
    "publication_date" : "{{DATE}}",
    "description"      : "<p>The fslpy project is a <a href=\"https://fsl.fmrib.ox.ac.uk/fsl/fslwiki\">FSL</a> programming library written in Python. It is used by <a href=\"http://git.fmrib.ox.ac.uk/fsl/fsleyes/fsleyes/\">FSLeyes</a>.</p>\n\n<p>The fslpy library is developed at the Wellcome Centre for Integrative Neuroimaging (FMRIB), at the University of Oxford. It is hosted at <a href=\"https://git.fmrib.ox.ac.uk/fsl/fslpy/\">https://git.fmrib.ox.ac.uk/fsl/fslpy/</a>.</p>",
    "keywords"         : ["python", "mri", "neuroimaging", "neuroscience"],
    "access_right"     : "open",
    "license"          : "Apache-2.0",
    "creators"         : [
      { "name" : "McCarthy, Paul" },
      { "name" : "Cottaar, Michiel" },
      { "name" : "Webster, Matthew" },
      { "name" : "Fitzgibbon, Sean" },
      { "name" : "Craig, Martin" }
    ]
  }
}
python:
- 3.6
\ No newline at end of file
{% set data = load_setup_py_data() %}

package:
  name: fslpy
  version: {{ data['version'] }}

build:
  noarch: python
  script: python setup.py install --single-version-externally-managed --record=record.txt

source:
  path: ../

requirements:
  build:
    - python {{ python }}
    - setuptools
    {% for package in data.get('setup_requires', {}) %}
    - {{ package.lower() }}
    {% endfor %}
  run:
    - python
    {% for package in data.get('install_requires', {}) %}
    - {{ package.lower() }}
    {% endfor %}
  run_constrained:
    {% for name, pkgs in data.get('extras_require', dict()).items() %}
    {% for package in pkgs %}
    - {{ package.lower() }}
    {% endfor %}
    {% endfor %}

{% if 'test_suite' in data %}
test:
  requires:
    {% for package in data.get('setup_requires', {}) %}
    - {{ package.lower() }}
    {% endfor %}
  source_files:
    - {{ data['test_suite'] }}
  commands:
    - python -m pytest {{ data['test_suite'] }} -m "not (fsltest or wxtest or dicomtest or meshtest or igziptest or noroottest or longtest)"
{% endif %}

about:
  license_file: LICENSE
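As a sketch of how this template renders: if setup.py declared install_requires=['numpy', 'nibabel'] (hypothetical values), the run section above would expand to:

    run:
      - python
      - numpy
      - nibabel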
if [ -e ${FSLDIR}/etc/fslconf/requestFSLpythonLink.sh ]; then
    $FSLDIR/etc/fslconf/requestFSLpythonLink.sh atlasquery atlasq imcp immv imglob extract_noise
fi
if [ -e ${FSLDIR}/etc/fslconf/requestFSLpythonUnlink.sh ]; then
    $FSLDIR/etc/fslconf/requestFSLpythonUnlink.sh atlasquery atlasq imcp immv imglob
fi
@@ -9,12 +9,12 @@
#
# 2. style: Check coding style
#
# 3. doc: Building API documentation
# 3. doc: Building and uploading API documentation using GitLab Pages.
#
# 4. build: Building source, wheel and conda distributions
# 4. build: Building source and wheel distributions
#
# 5. deploy: Uploading the build outputs to pypi/hosting servers.
#
# 5. deploy: Uploading the build outputs to pypi/hosting servers, and the
# documentation to a hosting server.
#
# Custom docker images are used for several jobs - these images are
# available at:
@@ -24,13 +24,10 @@
# The test and style stages are executed on all branches of upstream and fork
# repositories.
#
# The doc stage, and the deploy-doc job, is executed on all branches of the
# upstream repository.
#
# The build stage, and the remaining jobs in the deploy stage, are only
# executed on the upstream repository, and only for release tags.
# The doc stage is executed on release branches of the upstream repository.
#
# The deploy stages are manually instantiated.
# The build and deploy stages are executed on tags on the upstream
# repository, and the deploy stage must be manually instantiated.
#
# Most of the logic for each job is defined in shell scripts in the .ci
# sub-directory.
@@ -61,34 +58,25 @@ stages:
# - SSH_PRIVATE_KEY_FSL_DOWNLOAD - private key for downloading some FSL
# files from a remote server (FSL_HOST)
#
# - SSH_PRIVATE_KEY_DOC_DEPLOY - private key for rsyncing documentation
# to remote host (DOC_HOST)
#
# - SSH_PRIVATE_KEY_CONDA_DEPLOY - private key for rsyncing conda builds
# to remote host (CONDA_HOST)
#
# - SSH_PRIVATE_KEY_CONDA_INDEX - private key for updating conda channel
# (on CONDA_HOST)
#
# - SSH_SERVER_HOSTKEYS - List of trusted SSH hosts
#
# - DOC_HOST: - Username@host to upload documentation to
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
#
# - FSL_HOST: - Username@host to download FSL data from
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
# (most likely "paulmc@localhost")
#
# - CONDA_HOST: - Username@host to upload conda build to
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
#
# - CONDA_CHANNELS - List of additional conda channels to
# use for conda build.
# - FSL_ATLAS_DIR: - Location of the FSL atlas data on
# FSL_HOST.
#
# - TWINE_USERNAME: - Username to use when uploading to pypi
#
# - TWINE_PASSWORD: - Password to use when uploading to pypi
#
# - TWINE_REPOSITORY_URL: - Pypi repository to upload to
#
# - ZENODO_URL: - Zenodo URL to deposit release file to.
#
# - ZENODO_TOKEN: - Zenodo access token.
#
# - ZENODO_DEPOSIT_ID: - Deposit ID of previous Zenodo deposit.
###############################################################################
@@ -103,14 +91,9 @@ variables:
####################################
.only_upstream: &only_upstream
.only_release_branches: &only_release_branches
  only:
    - branches@fsl/fslpy

.only_master: &only_master
  only:
    - master@fsl/fslpy
    - /^v.+$/@fsl/fslpy
.only_releases: &only_releases
@@ -118,40 +101,53 @@
    - tags@fsl/fslpy

.except_releases: &except_releases
  except:
    - tags
.setup_ssh: &setup_ssh
  before_script:
    - bash ./.ci/setup_ssh.sh
###################################################
# The patch_version anchor contains a before_script
# The check_version anchor contains a before_script
# section which is run on release builds, and makes
# sure that the version in the code is up to date
# (i.e. equal to the tag name).
###################################################
.patch_version: &patch_version
.check_version: &check_version
  before_script:
    - bash ./.ci/patch_version.sh
    - bash ./.ci/check_version.sh
############
# Test stage
############
.test_rules: &test_rules
  # We only run tests on MRs, and on release branches
  # (a more substantial test suite is run on release
  # branches - see .ci/test_template.sh). We don't run
  # on upstream/main, as all merges are fast-forwards,
  # so the tests will have already been run on the MR
  # branch. We also allow manually running a pipeline
  # via the web interface.
  rules:
    - if: $SKIP_TESTS != null
      when: never
    - if: $CI_COMMIT_MESSAGE =~ /\[skip-tests\]/
      when: never
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: on_success
    - if: $CI_PROJECT_PATH == $UPSTREAM_PROJECT && $CI_COMMIT_BRANCH =~ /^v.+$/
      when: on_success
    - if: $CI_PIPELINE_SOURCE == "web"
      when: on_success
    - when: never
.test: &test_template
  <<: *setup_ssh
  # Releases are just tags on a release
  # branch, so we don't need to test them.
  <<: *except_releases
  <<: *test_rules
  tags:
    - docker
@@ -160,30 +156,42 @@ variables:
    - bash ./.ci/test_template.sh
test:2.7:
test:3.10:
  stage: test
  image: pauldmccarthy/fsleyes-py27-wxpy4-gtk2
  image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
  <<: *test_template

test:3.4:
test:3.11:
  stage: test
  image: pauldmccarthy/fsleyes-py34-wxpy4-gtk2
  image: pauldmccarthy/fsleyes-py311-wxpy4-gtk3
  <<: *test_template

test:3.5:
test:3.12:
  stage: test
  image: pauldmccarthy/fsleyes-py35-wxpy4-gtk2
  image: pauldmccarthy/fsleyes-py312-wxpy4-gtk3
  <<: *test_template

test:3.6:
test:3.13:
  stage: test
  image: pauldmccarthy/fsleyes-py36-wxpy4-gtk2
  image: pauldmccarthy/fsleyes-py313-wxpy4-gtk3
  <<: *test_template

test:build-pypi-dist:
  stage: test
  image: python:3.10
  <<: *test_rules
  tags:
    - docker
  script:
    - bash ./.ci/build_pypi_dist.sh
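For illustration, once the YAML anchors are merged, a job such as test:3.10 is roughly equivalent to the following sketch (the rules entries from .test_rules are omitted for brevity):

    test:3.10:
      stage: test
      image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
      before_script:
        - bash ./.ci/setup_ssh.sh
      tags:
        - docker
      script:
        - bash ./.ci/test_template.sh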
#############
# Style stage
#############
@@ -191,33 +199,37 @@ test:3.6:
style:
  stage: style
  image: pauldmccarthy/fsleyes-py35-wxpy4-gtk2
  image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
  <<: *test_template
  variables:
    TEST_STYLE: "true"
###########
# Doc stage
###########
#############
# Pages stage
#############
# I would like to have separate doc deploys for
# both the main and latest release branches,
# but this is awkward with gitlab pages. So
# currently the most recently executed pages
# job is the one that gets deployed.
build-doc:
  <<: *only_upstream
  <<: *patch_version
pages:
  <<: *only_release_branches
  tags:
    - docker
  stage: doc
  image: python:3.5
  image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
  script:
    - bash ./.ci/build_doc.sh
  artifacts:
    expire_in: 1 day
    paths:
      - doc/$CI_COMMIT_REF_NAME
      - public
#############
@@ -227,10 +239,10 @@ build-doc:
build-pypi-dist:
  <<: *only_releases
  <<: *patch_version
  <<: *check_version
  stage: build
  image: python:3.5
  image: python:3.10
  tags:
    - docker
@@ -244,53 +256,17 @@ build-pypi-dist:
      - dist/*
build-conda-dist:
  <<: *only_releases
  <<: *patch_version
  stage: build
  image: continuumio/miniconda3
  tags:
    - docker
  script:
    - bash ./.ci/build_conda_dist.sh fslpy "$CI_COMMIT_REF_NAME"
  artifacts:
    expire_in: 1 day
    paths:
      - dist/conda-bld
##############
# Deploy stage
##############
deploy-doc:
  <<: *only_upstream
  <<: *setup_ssh
  stage: deploy
  when: manual
  image: python:3.5
  tags:
    - docker
  dependencies:
    - build-doc
  script:
    - bash ./.ci/deploy_doc.sh

deploy-pypi:
  <<: *only_releases
  <<: *setup_ssh
  stage: deploy
  when: manual
  image: python:3.5
  image: python:3.10
  tags:
    - docker
@@ -302,19 +278,18 @@ deploy-pypi:
    - bash ./.ci/deploy_pypi.sh
deploy-conda:
deploy-zenodo:
  <<: *only_releases
  <<: *setup_ssh
  stage: deploy
  when: manual
  image: python:3.5
  image: python:3.10
  tags:
    - docker
  dependencies:
    - build-conda-dist
    - build-pypi-dist
  script:
    - bash ./.ci/deploy_conda.sh
    - bash ./.ci/zenodo_deposit.sh "$ZENODO_URL" "$ZENODO_TOKEN" "$ZENODO_DEPOSIT_ID"
Paul McCarthy <pauldmccarthy@gmail.com>
Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
Matthew Webster <matthew.webster@ndcn.ox.ac.uk>
Sean Fitzgibbon <sean.fitzgibbon@ndcn.ox.ac.uk>
\ No newline at end of file
Sean Fitzgibbon <sean.fitzgibbon@ndcn.ox.ac.uk>
Martin Craig <martin.craig@eng.ox.ac.uk>
Taylor Hanayik <taylor.hanayik@ndcn.ox.ac.uk>
Evan Edmond <evan.edmond@ndcn.ox.ac.uk>
Christoph Arthofer <christoph.arthofer@ndcn.ox.ac.uk>
Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
\ No newline at end of file
Copyright 2016-2018 University of Oxford, Oxford, UK
Copyright 2016-2023 University of Oxford, Oxford, UK