#!/usr/bin/env bash
set -e
name=$1
version=$2
# add any extra channels that are needed
for channel in $CONDA_CHANNELS; do
conda config --append channels $channel
done
# insert project name/version into meta.yaml
echo "{% set name = '$name' %}" > vars.txt
echo "{% set version = '$version' %}" >> vars.txt
cat vars.txt .conda/meta.yaml > tempfile
mv tempfile .conda/meta.yaml
rm vars.txt
mkdir -p dist
conda update --yes conda
conda install --yes setuptools conda-build
conda build --output-folder=dist .conda
# tar it up
cd dist
tar czf "$name"-"$version"-conda.tar.gz *
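# Example invocation (this mirrors the build-conda-dist CI job; the version
# value shown here is hypothetical):
#
#   bash ./.ci/build_conda_dist.sh fslpy 3.0.0
#
# which leaves dist/fslpy-3.0.0-conda.tar.gz ready to be rsynced by
# deploy_conda.sh.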
@@ -2,5 +2,6 @@
set -e
python setup.py doc
mv doc/html doc/"$CI_COMMIT_REF_NAME"
source /test.venv/bin/activate
pip install ".[doc]"
sphinx-build doc public
@@ -2,6 +2,20 @@
set -e
pip install wheel
python setup.py sdist
python setup.py bdist_wheel
pip install --upgrade pip wheel setuptools twine build
python -m build
twine check dist/*
# do a test install from both source and wheel
sdist=$(find dist -maxdepth 1 -name "*.tar.gz")
wheel=$(find dist -maxdepth 1 -name "*.whl")
for target in $sdist $wheel; do
python -m venv test.venv
. test.venv/bin/activate
pip install --upgrade pip setuptools
pip install $target
deactivate
rm -r test.venv
done
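# Example of what a successful run leaves behind (a sketch; file names assume
# the standard sdist/wheel naming scheme and a hypothetical version):
#
#   $ bash ./.ci/build_pypi_dist.sh
#   $ ls dist/
#   fslpy-3.0.0-py3-none-any.whl  fslpy-3.0.0.tar.gz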
#!/usr/bin/env bash
set -e
cat fsl/version.py | egrep "^__version__ += +'$CI_COMMIT_REF_NAME' *$"
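# For example (hypothetical values), if fsl/version.py contains
#   __version__ = '3.0.0'
# the check above succeeds only when the pipeline runs for ref "3.0.0":
#
#   CI_COMMIT_REF_NAME=3.0.0 bash ./.ci/check_version.sh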
#!/usr/bin/env bash
set -e
rsync -rv dist/*conda.tar.gz "condadeploy:"
#!/usr/bin/env bash
set -e
rsync -rv doc/"$CI_COMMIT_REF_NAME" "docdeploy:"
#!/usr/bin/env bash
set -e
if [[ "x$CI_COMMIT_TAG" != "x" ]]; then
echo "Release detected - patching version - $CI_COMMIT_REF_NAME";
python -c "import fsl.version as v; v.patchVersion('fsl/version.py', '$CI_COMMIT_REF_NAME')";
fi
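# A minimal sketch of running the script above by hand, simulating a tagged
# release pipeline (the tag/ref values are hypothetical):
#
#   CI_COMMIT_TAG=3.0.0 CI_COMMIT_REF_NAME=3.0.0 bash ./.ci/patch_version.sh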
@@ -21,48 +21,39 @@ set -e
if [[ -f /.dockerenv ]]; then
# We have to use different host names to connect
# to the docker daemon host on mac as opposed
# to on linux.
#
# On linux (assuming the docker job is running
# with --net=host), we can connect via
# username@localhost.
#
# On mac, we have to connect via
# username@host.docker.internal
if [[ "$CI_RUNNER_TAGS" == *"macOS"* ]]; then
if [[ "$FSL_HOST" == *"@localhost" ]]; then
FSL_HOST=${FSL_HOST/localhost/host.docker.internal}
fi
fi
apt-get update -y || yum -y check-update || true;
apt-get install -y openssh-client rsync git || yum install -y openssh-client rsync git || true;
eval $(ssh-agent -s);
mkdir -p $HOME/.ssh;
echo "$SSH_PRIVATE_KEY_GIT" > $HOME/.ssh/id_git;
# for downloading FSL atlases/standards
echo "$SSH_PRIVATE_KEY_FSL_DOWNLOAD" > $HOME/.ssh/id_fsl_download;
if [[ "$CI_PROJECT_PATH" == "$UPSTREAM_PROJECT" ]]; then
echo "$SSH_PRIVATE_KEY_DOC_DEPLOY" > $HOME/.ssh/id_doc_deploy;
echo "$SSH_PRIVATE_KEY_CONDA_DEPLOY" > $HOME/.ssh/id_conda_deploy;
fi;
chmod go-rwx $HOME/.ssh/id_*;
ssh-add $HOME/.ssh/id_git;
ssh-add $HOME/.ssh/id_fsl_download;
if [[ "$CI_PROJECT_PATH" == "$UPSTREAM_PROJECT" ]]; then
ssh-add $HOME/.ssh/id_doc_deploy;
ssh-add $HOME/.ssh/id_conda_deploy;
fi
echo "$SSH_SERVER_HOSTKEYS" > $HOME/.ssh/known_hosts;
ssh-keyscan ${FSL_HOST##*@} >> $HOME/.ssh/known_hosts;
touch $HOME/.ssh/config;
echo "Host ${UPSTREAM_URL##*@}" >> $HOME/.ssh/config;
echo " User ${UPSTREAM_URL%@*}" >> $HOME/.ssh/config;
echo " IdentityFile $HOME/.ssh/id_git" >> $HOME/.ssh/config;
echo "Host docdeploy" >> $HOME/.ssh/config;
echo " HostName ${DOC_HOST##*@}" >> $HOME/.ssh/config;
echo " User ${DOC_HOST%@*}" >> $HOME/.ssh/config;
echo " IdentityFile $HOME/.ssh/id_doc_deploy" >> $HOME/.ssh/config;
echo "Host condadeploy" >> $HOME/.ssh/config;
echo " HostName ${CONDA_HOST##*@}" >> $HOME/.ssh/config;
echo " User ${CONDA_HOST%@*}" >> $HOME/.ssh/config;
echo " IdentityFile $HOME/.ssh/id_conda_deploy" >> $HOME/.ssh/config;
echo "Host fsldownload" >> $HOME/.ssh/config;
echo " HostName ${FSL_HOST##*@}" >> $HOME/.ssh/config;
echo " User ${FSL_HOST%@*}" >> $HOME/.ssh/config;
......
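# Illustration (not part of the script): the ${VAR##*@} / ${VAR%@*}
# expansions used above split a "user@host" value into its two halves.
#
#   FSL_HOST="paulmc@localhost"
#   echo "${FSL_HOST##*@}"   # -> localhost  (text after the last '@')
#   echo "${FSL_HOST%@*}"    # -> paulmc     (text before the last '@')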
@@ -2,52 +2,58 @@
set -e
# If running on a fork repository, we merge in the
# upstream/master branch. This is done so that merge
# requests from fork to the parent repository will
# have unit tests run on the merged code, something
# which gitlab CE does not currently do for us.
if [[ "$CI_PROJECT_PATH" != "$UPSTREAM_PROJECT" ]]; then
git fetch upstream;
git merge --no-commit --no-ff upstream/master;
fi;
source /test.venv/bin/activate
pip install -r requirements.txt
pip install -r requirements-extra.txt
pip install -r requirements-dev.txt
pip install ".[extra,test,style]"
# style stage
if [ "$TEST_STYLE"x != "x" ]; then pip install pylint flake8; fi;
if [ "$TEST_STYLE"x != "x" ]; then flake8 fsl || true; fi;
if [ "$TEST_STYLE"x != "x" ]; then pylint --output-format=colorized fsl || true; fi;
if [ "$TEST_STYLE"x != "x" ]; then exit 0; fi
if [ "$TEST_STYLE"x != "x" ]; then exit 0; fi;
# We need the FSL atlases for the atlas
# tests, and need $FSLDIR to be defined
export FSLDIR=/fsl/
mkdir -p $FSLDIR/data/
rsync -rv "fsldownload:data/atlases/" "$FSLDIR/data/atlases/"
rsync -rv "fsldownload:$FSL_ATLAS_DIR" "$FSLDIR/data/atlases/"
# Run the tests. Suppress coverage
# reporting until after we're finished.
TEST_OPTS="--cov-report= --cov-append"
# Finally, run the damned tests.
# pytest struggles with my organisation of
# the fslpy package, where all tests are in
# fsl.tests, and fsl is a namespace package
touch fsl/__init__.py
# We run some tests under xvfb-run
# because they invoke wx. Sleep in
# between, otherwise xvfb gets upset.
xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_idle.py"
xvfb-run -a pytest $TEST_OPTS fsl/tests/test_idle.py
sleep 5
xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_platform.py"
xvfb-run -a pytest $TEST_OPTS fsl/tests/test_platform.py
# We run the immv/imcpy tests as the nobody
# We run the immv/imcp tests as the nobody
# user because some tests expect permission
# denied errors when looking at files, and
# root never gets denied. Make everything in
# this directory writable by anybody (which,
# unintuitively, includes nobody)
chmod -R a+w `pwd`
su -s /bin/bash -c 'source /test.venv/bin/activate && python setup.py test --addopts="$TEST_OPTS tests/test_immv_imcp.py"' nobody
# All other tests can be run as normal
python setup.py test --addopts="$TEST_OPTS --ignore=tests/test_idle.py --ignore=tests/test_platform.py --ignore=tests/test_immv_imcp.py"
python -m coverage report
cmd="source /test.venv/bin/activate && pytest"
cmd="$cmd $TEST_OPTS fsl/tests/test_scripts/test_immv_imcp.py fsl/tests/test_immv_imcp.py"
su -s /bin/bash -c "$cmd" nobody
# All other tests can be run as normal.
pytest $TEST_OPTS -m 'not longtest' \
--ignore=fsl/tests/test_idle.py \
--ignore=fsl/tests/test_platform.py \
--ignore=fsl/tests/test_immv_imcp.py \
--ignore=fsl/tests/test_scripts/test_immv_imcp.py
# Long tests are only run on release branches
if [[ $CI_COMMIT_REF_NAME == v* ]]; then
pytest $TEST_OPTS -m 'longtest'
fi
python -m coverage report -i
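# To approximate this test run locally (a sketch, not the CI recipe; assumes
# a working display or xvfb, and skips the FSL atlas setup):
#
#   pip install ".[extra,test]"
#   xvfb-run -a pytest -m 'not longtest' fsl/tests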
#!/usr/bin/env python
#
# Deposit a new version of something on zenodo.
#
# It is assumed that a deposit already exists on zenodo - you must
# specify the deposit ID of that original deposit.
#
# http://developers.zenodo.org/#rest-api
import os.path as op
import sys
import json
import jinja2 as j2
import requests
def deposit(zenodo_url, access_token, dep_id, upload_file, meta):
urlbase = '{}/api/deposit/depositions'.format(zenodo_url)
headers = {'Content-Type': 'application/json'}
params = {'access_token' : access_token}
# Create a new version of the existing deposit
url = '{}/{}/actions/newversion'.format(urlbase, dep_id)
print('Creating new deposit: {}'.format(url))
r = requests.post(url, params=params)
if r.status_code != 201:
raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))
newurl = r.json()['links']['latest_draft']
dep_id = newurl.split('/')[-1]
print("New deposition ID: {}".format(dep_id))
# Upload the file
data = {'filename': op.basename(upload_file)}
files = {'file': open(upload_file, 'rb')}
url = '{}/{}/files'.format(urlbase, dep_id)
print('Uploading file: {}'.format(url))
r = requests.post(url, params=params, data=data, files=files)
if r.status_code != 201:
raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))
# Upload the metadata
url = '{}/{}?access_token={}'.format(urlbase, dep_id, access_token)
print('Uploading metadata: {}'.format(url))
r = requests.put(url, data=json.dumps(meta), headers=headers)
if r.status_code != 200:
print(r.json())
raise RuntimeError('PUT {} failed: {}'.format(url, r.status_code))
# Publish
url = '{}/{}/actions/publish'.format(urlbase, dep_id)
print('Publishing: {}'.format(url))
r = requests.post(url, params=params)
if r.status_code != 202:
raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))
def make_meta(templatefile, version, date):
with open(templatefile, 'rt') as f:
template = f.read()
template = j2.Template(template)
env = {
'VERSION' : version,
'DATE' : date,
}
return json.loads(template.render(**env))
if __name__ == '__main__':
zurl = sys.argv[1]
tkn = sys.argv[2]
depid = sys.argv[3]
upfile = sys.argv[4]
metafile = sys.argv[5]
version = sys.argv[6]
date = sys.argv[7]
meta = make_meta(metafile, version, date)
deposit(zurl, tkn, depid, upfile, meta)
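# For reference, the initial "new version" request above is equivalent to a
# single curl call (placeholder deposit ID; token taken from the environment):
#
#   curl -X POST \
#     "$ZENODO_URL/api/deposit/depositions/123456/actions/newversion?access_token=$ZENODO_TOKEN"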
#!/bin/bash
tmp=`dirname $0`
pushd $tmp > /dev/null
thisdir=`pwd`
popd > /dev/null
zenodo_url=$1
zenodo_tkn=$2
zenodo_depid=$3
version=$(cat fsl/version.py |
egrep '^__version__ +=' |
cut -d "=" -f 2 |
tr -d "'" |
tr -d ' ')
upfile=$(pwd)/dist/fslpy-"$version".tar.gz
metafile=$(pwd)/.ci/zenodo_meta.json.jinja2
date=$(date +"%Y-%m-%d")
pip install --retries 10 requests jinja2
python "$thisdir"/zenodo.py \
"$zenodo_url" \
"$zenodo_tkn" \
"$zenodo_depid" \
"$upfile" \
"$metafile" \
"$version" \
"$date"
{
"metadata" : {
"title" : "fslpy",
"upload_type" : "software",
"version" : "{{VERSION}}",
"publication_date" : "{{DATE}}",
"description" : "<p>The fslpy project is a <a href=\"https://fsl.fmrib.ox.ac.uk/fsl/fslwiki\">FSL</a> programming library written in Python. It is used by <a href=\"http://git.fmrib.ox.ac.uk/fsl/fsleyes/fsleyes/\">FSLeyes</a>.</p>\n\n<p>The fslpy library is developed at the Wellcome Centre for Integrative Neuroimaging (FMRIB), at the University of Oxford. It is hosted at <a href=\"https://git.fmrib.ox.ac.uk/fsl/fslpy/\">https://git.fmrib.ox.ac.uk/fsl/fslpy/</a>.</p>",
"keywords" : ["python", "mri", "neuroimaging", "neuroscience"],
"access_right" : "open",
"license" : "Apache-2.0",
"creators" : [
{ "name" : "McCarthy, Paul" },
{ "name" : "Cottaar, Michiel" },
{ "name" : "Webster, Matthew" },
{ "name" : "Fitzgibbon, Sean" },
{ "name" : "Craig, Martin" }
]
}
}
python:
- 3.6
\ No newline at end of file
package:
name: '{{ name }}'
version: '{{ version }}'
build:
noarch: python
script: python setup.py install --single-version-externally-managed --record=record.txt
source:
path:
../
requirements:
build:
- python {{ python }}
- setuptools
- six 1.*
- deprecation 1.*
- numpy 1.*
- scipy >=0.18,<1
- nibabel 2.*
- sphinx 1.6.*
- sphinx_rtd_theme 0.*
- mock 2.*
- coverage 4.*
- pytest 3.*
- pytest-cov 2.*
- pytest-runner >=2.*,<=3.*
run:
- python
- six 1.*
- deprecation 1.*
- numpy 1.*
- scipy >=0.18,<1
- nibabel 2.*
run_constrained:
- indexed_gzip >=0.6.1,<1
- wxpython >=3.0.2.0,<4.1
- trimesh 2.*
- rtree 0.8
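# For reference, after build_conda_dist.sh prepends the name/version
# variables, the rendered recipe begins like this (version hypothetical):
#
#   {% set name = 'fslpy' %}
#   {% set version = '3.0.0' %}
#   package:
#     name: '{{ name }}'
#     version: '{{ version }}'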
@@ -9,12 +9,12 @@
#
# 2. style: Check coding style
#
# 3. doc: Building API documentation
# 3. doc: Building and uploading API documentation using GitLab Pages.
#
# 4. build: Building source, wheel and conda distributions
# 4. build: Building source and wheel distributions
#
# 5. deploy: Uploading the build outputs to pypi/hosting servers.
#
# 5. deploy: Uploading the build outputs to pypi/hosting servers, and the
# documentation to a hosting server.
#
# Custom docker images are used for several jobs - these images are
# available at:
@@ -24,13 +24,10 @@
# The test and style stages are executed on all branches of upstream and fork
# repositories.
#
# The doc stage, and the deploy-doc job, is executed on all branches of the
# upstream repository.
#
# The build stage, and the remaining jobs in the deploy stage, are only
# executed on the upstream repository, and only for release tags.
# The doc stage is executed on release branches of the upstream repository.
#
# The deploy stages are manually instantiated.
# The build and deploy stages are executed on tags on the upstream
# repository, and the deploy stage must be manually instantiated.
#
# Most of the logic for each job is defined in shell scripts in the .ci
# sub-directory.
@@ -61,38 +58,31 @@ stages:
# - SSH_PRIVATE_KEY_FSL_DOWNLOAD - private key for downloading some FSL
# files from a remote server (FSL_HOST)
#
# - SSH_PRIVATE_KEY_DOC_DEPLOY - private key for rsyncing documentation
# to remote host (DOC_HOST)
#
# - SSH_PRIVATE_KEY_CONDA_DEPLOY - private key for rsyncing conda builds
# to remote host (CONDA_HOST)
#
# - SSH_SERVER_HOSTKEYS - List of trusted SSH hosts
#
# - DOC_HOST: - Username@host to upload documentation to
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
#
# - FSL_HOST: - Username@host to download FSL data from
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
#
# - CONDA_HOST: - Username@host to upload conda build to
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
# (most likely "paulmc@localhost")
#
# - CONDA_CHANNELS - List of additional conda channels to
# use for conda build.
# - FSL_ATLAS_DIR: - Location of the FSL atlas data on
# FSL_HOST.
#
# - TWINE_USERNAME: - Username to use when uploading to pypi
#
# - TWINE_PASSWORD: - Password to use when uploading to pypi
#
# - TWINE_REPOSITORY_URL: - Pypi repository to upload to
#
# - ZENODO_URL: - Zenodo URL to deposit release file to.
#
# - ZENODO_TOKEN: - Zenodo access token.
#
# - ZENODO_DEPOSIT_ID: - Deposit ID of previous Zenodo deposit.
###############################################################################
variables:
UPSTREAM_PROJECT: "fsl/fslpy"
UPSTREAM_URL: "git@git.fmrib.ox.ac.uk"
TEST_OPTS: "--cov-report= --cov-append"
UPSTREAM_PROJECT: "fsl/fslpy"
UPSTREAM_URL: "git@git.fmrib.ox.ac.uk"
####################################
@@ -101,14 +91,9 @@ variables:
####################################
.only_upstream: &only_upstream
only:
- branches@fsl/fslpy
.only_master: &only_master
.only_release_branches: &only_release_branches
only:
- master@fsl/fslpy
- /^v.+$/@fsl/fslpy
.only_releases: &only_releases
@@ -116,40 +101,53 @@
- tags@fsl/fslpy
.except_releases: &except_releases
except:
- tags
.setup_ssh: &setup_ssh
before_script:
- bash ./.ci/setup_ssh.sh
###################################################
# The patch_version anchor contains a before_script
# The check_version anchor contains a before_script
# section which is run on release builds, and makes
# sure that the version in the code is up to date
# (i.e. equal to the tag name).
###################################################
.patch_version: &patch_version
.check_version: &check_version
before_script:
- bash ./.ci/patch_version.sh
- bash ./.ci/check_version.sh
############
# Test stage
############
.test_rules: &test_rules
# We only run tests on MRs, and on release branches
# (a more substantial test suite is run on release
# branches - see .ci/test_template.sh). We don't run
# on upstream/main, as all merges are fast-forwards,
# so the tests will have already been run on the MR
# branch. We also allow manually running a pipeline
# via the web interface.
rules:
- if: $SKIP_TESTS != null
when: never
- if: $CI_COMMIT_MESSAGE =~ /\[skip-tests\]/
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: on_success
- if: $CI_PROJECT_PATH == $UPSTREAM_PROJECT && $CI_COMMIT_BRANCH =~ /^v.+$/
when: on_success
- if: $CI_PIPELINE_SOURCE == "web"
when: on_success
- when: never
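# For example, a commit pushed with
#   git commit -m "Update changelog [skip-tests]"
# will not trigger the test jobs; setting the SKIP_TESTS CI variable to any
# value has the same effect.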
.test: &test_template
<<: *setup_ssh
# Releases are just tags on a release
# branch, so we don't need to test them.
<<: *except_releases
<<: *test_rules
tags:
- docker
@@ -158,30 +156,42 @@ variables:
- bash ./.ci/test_template.sh
test:2.7:
test:3.10:
stage: test
image: pauldmccarthy/fsleyes-py27-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
<<: *test_template
test:3.4:
test:3.11:
stage: test
image: pauldmccarthy/fsleyes-py34-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py311-wxpy4-gtk3
<<: *test_template
test:3.5:
test:3.12:
stage: test
image: pauldmccarthy/fsleyes-py35-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py312-wxpy4-gtk3
<<: *test_template
test:3.6:
test:3.13:
stage: test
image: pauldmccarthy/fsleyes-py36-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py313-wxpy4-gtk3
<<: *test_template
test:build-pypi-dist:
stage: test
image: python:3.10
<<: *test_rules
tags:
- docker
script:
- bash ./.ci/build_pypi_dist.sh
#############
# Style stage
#############
@@ -189,33 +199,37 @@ test:3.6:
style:
stage: style
image: pauldmccarthy/fsleyes-py35-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
<<: *test_template
variables:
TEST_STYLE: "true"
###########
# Doc stage
###########
#############
# Pages stage
#############
# I would like to have separate doc deploys for
# both the main and latest release branches,
# but this is awkward with gitlab pages. So
# currently the most recently executed pages
# job is the one that gets deployed.
build-doc:
<<: *only_upstream
<<: *patch_version
pages:
<<: *only_release_branches
tags:
- docker
stage: doc
image: python:3.5
image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
script:
- bash ./.ci/build_doc.sh
artifacts:
expire_in: 1 day
paths:
- doc/$CI_COMMIT_REF_NAME
- public
#############
@@ -225,10 +239,10 @@ build-doc:
build-pypi-dist:
<<: *only_releases
<<: *patch_version
<<: *check_version
stage: build
image: python:3.5
image: python:3.10
tags:
- docker
@@ -242,53 +256,17 @@ build-pypi-dist:
- dist/*
build-conda-dist:
<<: *only_releases
<<: *patch_version
stage: build
image: continuumio/miniconda3
tags:
- docker
script:
- bash ./.ci/build_conda_dist.sh fslpy "$CI_COMMIT_REF_NAME"
artifacts:
expire_in: 1 day
paths:
- dist/*conda.tar.gz
##############
# Deploy stage
##############
deploy-doc:
<<: *only_upstream
<<: *setup_ssh
stage: deploy
when: manual
image: python:3.5
tags:
- docker
dependencies:
- build-doc
script:
- bash ./.ci/deploy_doc.sh
deploy-pypi:
<<: *only_releases
<<: *setup_ssh
stage: deploy
when: manual
image: python:3.5
image: python:3.10
tags:
- docker
@@ -300,19 +278,18 @@ deploy-pypi:
- bash ./.ci/deploy_pypi.sh
deploy-conda:
deploy-zenodo:
<<: *only_releases
<<: *setup_ssh
stage: deploy
when: manual
image: python:3.5
image: python:3.10
tags:
- docker
dependencies:
- build-conda-dist
- build-pypi-dist
script:
- bash ./.ci/deploy_conda.sh
- bash ./.ci/zenodo_deposit.sh "$ZENODO_URL" "$ZENODO_TOKEN" "$ZENODO_DEPOSIT_ID"
Paul McCarthy <pauldmccarthy@gmail.com>
Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
Matthew Webster <matthew.webster@ndcn.ox.ac.uk>
Sean Fitzgibbon <sean.fitzgibbon@ndcn.ox.ac.uk>
Martin Craig <martin.craig@eng.ox.ac.uk>
Taylor Hanayik <taylor.hanayik@ndcn.ox.ac.uk>
Evan Edmond <evan.edmond@ndcn.ox.ac.uk>
Christoph Arthofer <christoph.arthofer@ndcn.ox.ac.uk>
Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
\ No newline at end of file
(Collapsed file diff not shown.)
Copyright 2016-2017 University of Oxford, Oxford, UK
Copyright 2016-2023 University of Oxford, Oxford, UK
The fslpy library
Copyright 2016-2017 University of Oxford, Oxford, UK.
Copyright 2016-2023 University of Oxford, Oxford, UK.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
......
include LICENSE
include AUTHOR
include CHANGELOG.rst
include COPYRIGHT
include requirements.txt
include requirements-dev.txt
include pytest.ini
recursive-include doc *
recursive-exclude doc/html *
recursive-include tests *
include LICENSE
include README.rst
include conftest.py
recursive-include doc *
recursive-include fsl/tests *