Compare revisions

Commits on Source (1573)
......@@ -2,6 +2,6 @@
set -e
pip install -r requirements-dev.txt
python setup.py doc
mv doc/html doc/"$CI_COMMIT_REF_NAME"
source /test.venv/bin/activate
pip install ".[doc]"
sphinx-build doc public
......@@ -2,27 +2,15 @@
set -e
pip install wheel
python setup.py sdist
python setup.py bdist_wheel
pip install --upgrade pip wheel setuptools twine build
python -m build
twine check dist/*
# do a test install from both source and wheel
sdist=`find dist -maxdepth 1 -name *.tar.gz`
wheel=`find dist -maxdepth 1 -name *.whl`
# pip < 10 will not install wheels
# with an invalid name. So that we
# can generate builds from non-releases
# (e.g. the master branch),
# we hack the wheel file name here
# so that pip will accept it.
#
# This will no longer be necessary
# when pip 10 is available.
nwheel=`echo -n $wheel | sed -e 's/fslpy-/fslpy-0/g'`
mv $wheel $nwheel
wheel=$nwheel
for target in $sdist $wheel; do
    python -m venv test.venv
    . test.venv/bin/activate
......
#!/usr/bin/env bash
set -e
cat fsl/version.py | egrep "^__version__ += +'$CI_COMMIT_REF_NAME' *$"
#!/usr/bin/env bash
set -e
rsync -rv doc/"$CI_COMMIT_REF_NAME" "docdeploy:"
#!/usr/bin/env bash
set -e
if [[ "x$CI_COMMIT_TAG" != "x" ]]; then
echo "Release detected - patching version - $CI_COMMIT_REF_NAME";
python -c "import fsl.version as v; v.patchVersion('fsl/version.py', '$CI_COMMIT_REF_NAME')";
fi
......@@ -21,43 +21,39 @@ set -e
if [[ -f /.dockerenv ]]; then
# We have to use different host names to connect
# to the docker daemon host on mac as opposed
# to on linux.
#
# On linux (assuming the docker job is running
# with --net=host), we can connect via
# username@localhost.
#
# On mac, we have to connect via
# username@host.docker.internal
if [[ "$CI_RUNNER_TAGS" == *"macOS"* ]]; then
if [[ "$FSL_HOST" == *"@localhost" ]]; then
FSL_HOST=${FSL_HOST/localhost/host.docker.internal}
fi
fi
apt-get update -y || yum -y check-update || true;
apt-get install -y openssh-client rsync git || yum install -y openssh-client rsync git || true;
eval $(ssh-agent -s);
mkdir -p $HOME/.ssh;
echo "$SSH_PRIVATE_KEY_GIT" > $HOME/.ssh/id_git;
# for downloading FSL atlases/standards
echo "$SSH_PRIVATE_KEY_FSL_DOWNLOAD" > $HOME/.ssh/id_fsl_download;
if [[ "$CI_PROJECT_PATH" == "$UPSTREAM_PROJECT" ]]; then
echo "$SSH_PRIVATE_KEY_DOC_DEPLOY" > $HOME/.ssh/id_doc_deploy;
fi;
chmod go-rwx $HOME/.ssh/id_*;
ssh-add $HOME/.ssh/id_git;
ssh-add $HOME/.ssh/id_fsl_download;
if [[ "$CI_PROJECT_PATH" == "$UPSTREAM_PROJECT" ]]; then
ssh-add $HOME/.ssh/id_doc_deploy;
fi
ssh-keyscan ${UPSTREAM_URL##*@} >> $HOME/.ssh/known_hosts;
ssh-keyscan ${DOC_HOST##*@} >> $HOME/.ssh/known_hosts;
ssh-keyscan ${FSL_HOST##*@} >> $HOME/.ssh/known_hosts;
ssh-keyscan ${FSL_HOST##*@} >> $HOME/.ssh/known_hosts;
touch $HOME/.ssh/config;
echo "Host ${UPSTREAM_URL##*@}" >> $HOME/.ssh/config;
echo " User ${UPSTREAM_URL%@*}" >> $HOME/.ssh/config;
echo " IdentityFile $HOME/.ssh/id_git" >> $HOME/.ssh/config;
echo "Host docdeploy" >> $HOME/.ssh/config;
echo " HostName ${DOC_HOST##*@}" >> $HOME/.ssh/config;
echo " User ${DOC_HOST%@*}" >> $HOME/.ssh/config;
echo " IdentityFile $HOME/.ssh/id_doc_deploy" >> $HOME/.ssh/config;
echo "Host fsldownload" >> $HOME/.ssh/config;
echo " HostName ${FSL_HOST##*@}" >> $HOME/.ssh/config;
echo " User ${FSL_HOST%@*}" >> $HOME/.ssh/config;
......
......@@ -2,43 +2,36 @@
set -e
# If running on a fork repository, we merge in the
# upstream/master branch. This is done so that merge
# requests from fork to the parent repository will
# have unit tests run on the merged code, something
# which gitlab CE does not currently do for us.
if [[ "$CI_PROJECT_PATH" != "$UPSTREAM_PROJECT" ]]; then
git fetch upstream;
git merge --no-commit --no-ff upstream/master;
fi;
source /test.venv/bin/activate
pip install --retries 10 -r requirements.txt
pip install --retries 10 -r requirements-extra.txt
pip install --retries 10 -r requirements-dev.txt
pip install ".[extra,test,style]"
# style stage
if [ "$TEST_STYLE"x != "x" ]; then pip install --retries 10 pylint flake8; fi;
if [ "$TEST_STYLE"x != "x" ]; then flake8 fsl || true; fi;
if [ "$TEST_STYLE"x != "x" ]; then pylint --output-format=colorized fsl || true; fi;
if [ "$TEST_STYLE"x != "x" ]; then exit 0; fi
if [ "$TEST_STYLE"x != "x" ]; then exit 0; fi;
# We need the FSL atlases for the atlas
# tests, and need $FSLDIR to be defined
export FSLDIR=/fsl/
mkdir -p $FSLDIR/data/
rsync -rv "fsldownload:data/atlases/" "$FSLDIR/data/atlases/"
rsync -rv "fsldownload:$FSL_ATLAS_DIR" "$FSLDIR/data/atlases/"
# Finally, run the damned tests.
# Run the tests. Suppress coverage
# reporting until after we're finished.
TEST_OPTS="--cov-report= --cov-append"
# pytest struggles with my organisation of
# the fslpy package, where all tests are in
# fsl.tests, and fsl is a namespace package
touch fsl/__init__.py
# We run some tests under xvfb-run
# because they invoke wx. Sleep in
# between, otherwise xvfb gets upset.
xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_idle.py"
xvfb-run -a pytest $TEST_OPTS fsl/tests/test_idle.py
sleep 5
xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_platform.py"
xvfb-run -a pytest $TEST_OPTS fsl/tests/test_platform.py
# We run the immv/imcp tests as the nobody
# user because some tests expect permission
......@@ -47,16 +40,20 @@ xvfb-run python setup.py test --addopts="$TEST_OPTS tests/test_platform.py"
# this directory writable by anybody (which,
# unintuitively, includes nobody)
chmod -R a+w `pwd`
cmd="source /test.venv/bin/activate && python setup.py test"
cmd="$cmd --addopts='$TEST_OPTS tests/test_immv_imcp.py'"
cmd="source /test.venv/bin/activate && pytest"
cmd="$cmd $TEST_OPTS fsl/tests/test_scripts/test_immv_imcp.py fsl/tests/test_immv_imcp.py"
su -s /bin/bash -c "$cmd" nobody
# All other tests can be run as normal.
python setup.py test --addopts="$TEST_OPTS -m 'not longtest' --ignore=tests/test_idle.py --ignore=tests/test_platform.py --ignore=tests/test_immv_imcp.py"
pytest $TEST_OPTS -m 'not longtest' \
--ignore=fsl/tests/test_idle.py \
--ignore=fsl/tests/test_platform.py \
--ignore=fsl/tests/test_immv_imcp.py \
--ignore=fsl/tests/test_scripts/test_immv_imcp.py
# Long tests are only run on release branches
if [[ $CI_COMMIT_REF_NAME == v* ]]; then
    python setup.py test --addopts="$TEST_OPTS -m 'longtest'"
    pytest $TEST_OPTS -m 'longtest'
fi
python -m coverage report
python -m coverage report -i
#!/usr/bin/env python
#
# Deposit a new version of something on zenodo.
#
# It is assumed that a deposit already exists on zenodo - you must
# specify the deposit ID of that original deposit.
#
# http://developers.zenodo.org/#rest-api
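#
# Usage (the argument order matches the __main__ block at the bottom of this
# file; the values shown here are hypothetical examples, not a real release):
#
#   python zenodo.py https://zenodo.org "$ZENODO_TOKEN" 1234567 \
#       dist/fslpy-1.0.0.tar.gz .ci/zenodo_meta.json.jinja2 1.0.0 2023-01-01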
import os.path as op
import sys
import json
import jinja2 as j2
import requests
def deposit(zenodo_url, access_token, dep_id, upload_file, meta):

    urlbase = '{}/api/deposit/depositions'.format(zenodo_url)
    headers = {'Content-Type': 'application/json'}
    params  = {'access_token' : access_token}

    # Create a new deposit
    url = '{}/{}/actions/newversion'.format(urlbase, dep_id)
    print('Creating new deposit: {}'.format(url))
    r = requests.post(url, params=params)
    if r.status_code != 201:
        raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))

    newurl = r.json()['links']['latest_draft']
    dep_id = newurl.split('/')[-1]
    print("New deposition ID: {}".format(dep_id))

    # Upload the file
    data  = {'filename': op.basename(upload_file)}
    files = {'file': open(upload_file, 'rb')}
    url   = '{}/{}/files'.format(urlbase, dep_id)
    print('Uploading file: {}'.format(url))
    r = requests.post(url, params=params, data=data, files=files)
    if r.status_code != 201:
        raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))

    # Upload the metadata
    url = '{}/{}?access_token={}'.format(urlbase, dep_id, access_token)
    print('Uploading metadata: {}'.format(url))
    r = requests.put(url, data=json.dumps(meta), headers=headers)
    if r.status_code != 200:
        print(r.json())
        raise RuntimeError('PUT {} failed: {}'.format(url, r.status_code))

    # Publish
    url = '{}/{}/actions/publish'.format(urlbase, dep_id)
    print('Publishing: {}'.format(url))
    r = requests.post(url, params=params)
    if r.status_code != 202:
        raise RuntimeError('POST {} failed: {}'.format(url, r.status_code))


def make_meta(templatefile, version, date):

    with open(templatefile, 'rt') as f:
        template = f.read()

    template = j2.Template(template)
    env      = {
        'VERSION' : version,
        'DATE'    : date,
    }

    return json.loads(template.render(**env))


if __name__ == '__main__':

    zurl     = sys.argv[1]
    tkn      = sys.argv[2]
    depid    = sys.argv[3]
    upfile   = sys.argv[4]
    metafile = sys.argv[5]
    version  = sys.argv[6]
    date     = sys.argv[7]

    meta = make_meta(metafile, version, date)
    deposit(zurl, tkn, depid, upfile, meta)
#!/bin/bash
tmp=`dirname $0`
pushd $tmp > /dev/null
thisdir=`pwd`
popd > /dev/null
zenodo_url=$1
zenodo_tkn=$2
zenodo_depid=$3
version=$(cat fsl/version.py |
egrep '^__version__ +=' |
cut -d "=" -f 2 |
tr -d "'" |
tr -d ' ')
upfile=$(pwd)/dist/fslpy-"$version".tar.gz
metafile=$(pwd)/.ci/zenodo_meta.json.jinja2
date=$(date +"%Y-%m-%d")
pip install --retries 10 requests jinja2
python "$thisdir"/zenodo.py \
"$zenodo_url" \
"$zenodo_tkn" \
"$zenodo_depid" \
"$upfile" \
"$metafile" \
"$version" \
"$date"
{
  "metadata" : {
    "title"            : "fslpy",
    "upload_type"      : "software",
    "version"          : "{{VERSION}}",
    "publication_date" : "{{DATE}}",
    "description"      : "<p>The fslpy project is a <a href=\"https://fsl.fmrib.ox.ac.uk/fsl/fslwiki\">FSL</a> programming library written in Python. It is used by <a href=\"http://git.fmrib.ox.ac.uk/fsl/fsleyes/fsleyes/\">FSLeyes</a>.</p>\n\n<p>The fslpy library is developed at the Wellcome Centre for Integrative Neuroimaging (FMRIB), at the University of Oxford. It is hosted at <a href=\"https://git.fmrib.ox.ac.uk/fsl/fslpy/\">https://git.fmrib.ox.ac.uk/fsl/fslpy/</a>.</p>",
    "keywords"         : ["python", "mri", "neuroimaging", "neuroscience"],
    "access_right"     : "open",
    "license"          : "Apache-2.0",
    "creators"         : [
      { "name" : "McCarthy, Paul" },
      { "name" : "Cottaar, Michiel" },
      { "name" : "Webster, Matthew" },
      { "name" : "Fitzgibbon, Sean" },
      { "name" : "Craig, Martin" }
    ]
  }
}
......@@ -9,12 +9,12 @@
#
# 2. style: Check coding style
#
# 3. doc: Building API documentation
# 3. doc: Building and uploading API documentation using GitLab Pages.
#
# 4. build: Building source and wheel distributions
#
# 5. deploy: Uploading the build outputs to pypi/hosting servers, and the
# documentation to a hosting server.
# 5. deploy: Uploading the build outputs to pypi/hosting servers.
#
#
# Custom docker images are used for several jobs - these images are
# available at:
......@@ -24,13 +24,10 @@
# The test and style stages are executed on all branches of upstream and fork
# repositories.
#
# The doc stage, and the deploy-doc job, is executed on all branches of the
# upstream repository.
#
# The build stage, and the remaining jobs in the deploy stage, are only
# executed on the upstream repository, and only for release tags.
# The doc stage is executed on release branches of the upstream repository.
#
# The deploy stages are manually instantiated.
# The build and deploy stages are executed on tags on the upstream
# repository, and the deploy stage must be manually instantiated.
#
# Most of the logic for each job is defined in shell scripts in the .ci
# sub-directory.
......@@ -61,22 +58,25 @@ stages:
# - SSH_PRIVATE_KEY_FSL_DOWNLOAD - private key for downloading some FSL
# files from a remote server (FSL_HOST)
#
# - SSH_PRIVATE_KEY_DOC_DEPLOY - private key for rsyncing documentation
# to remote host (DOC_HOST)
#
# - SSH_SERVER_HOSTKEYS - List of trusted SSH hosts
#
# - DOC_HOST: - Username@host to upload documentation to
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
#
# - FSL_HOST: - Username@host to download FSL data from
# (e.g. "paulmc@jalapeno.fmrib.ox.ac.uk")
# (most likely "paulmc@localhost")
#
# - FSL_ATLAS_DIR: - Location of the FSL atlas data on
# FSL_HOST.
#
# - TWINE_USERNAME: - Username to use when uploading to pypi
#
# - TWINE_PASSWORD: - Password to use when uploading to pypi
#
# - TWINE_REPOSITORY_URL: - Pypi repository to upload to
#
# - ZENODO_URL: - Zenodo URL to deposit release file to.
#
# - ZENODO_TOKEN: - Zenodo access token.
#
# - ZENODO_DEPOSIT_ID: - Deposit ID of previous Zenodo deposit.
###############################################################################
......@@ -91,14 +91,9 @@ variables:
####################################
.only_upstream: &only_upstream
.only_release_branches: &only_release_branches
only:
- branches@fsl/fslpy
.only_master: &only_master
only:
- master@fsl/fslpy
- /^v.+$/@fsl/fslpy
.only_releases: &only_releases
......@@ -106,40 +101,53 @@ variables:
- tags@fsl/fslpy
.except_releases: &except_releases
except:
- tags
.setup_ssh: &setup_ssh
before_script:
- bash ./.ci/setup_ssh.sh
###################################################
# The patch_version anchor contains a before_script
# The check_version anchor contains a before_script
# section which is run on release builds, and makes
# sure that the version in the code is up to date
# (i.e. equal to the tag name).
###################################################
.patch_version: &patch_version
.check_version: &check_version
before_script:
- bash ./.ci/patch_version.sh
- bash ./.ci/check_version.sh
############
# Test stage
############
.test_rules: &test_rules
# We only run tests on MRs, and on release branches
# (a more substantial test suite is run on release
# branches - see .ci/test_template.sh). We don't run
# on upstream/main, as all merges are fast-forwards,
# so the tests will have already been run on the MR
# branch. We also allow manually running a pipeline
# via the web interface.
rules:
- if: $SKIP_TESTS != null
when: never
- if: $CI_COMMIT_MESSAGE =~ /\[skip-tests\]/
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: on_success
- if: $CI_PROJECT_PATH == $UPSTREAM_PROJECT && $CI_COMMIT_BRANCH =~ /^v.+$/
when: on_success
- if: $CI_PIPELINE_SOURCE == "web"
when: on_success
- when: never
.test: &test_template
<<: *setup_ssh
# Releases are just tags on a release
# branch, so we don't need to test them.
<<: *except_releases
<<: *test_rules
tags:
- docker
......@@ -148,24 +156,42 @@ variables:
- bash ./.ci/test_template.sh
test:3.5:
test:3.10:
stage: test
image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
<<: *test_template
test:3.11:
stage: test
image: pauldmccarthy/fsleyes-py35-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py311-wxpy4-gtk3
<<: *test_template
test:3.6:
test:3.12:
stage: test
image: pauldmccarthy/fsleyes-py36-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py312-wxpy4-gtk3
<<: *test_template
test:3.7:
test:3.13:
stage: test
image: pauldmccarthy/fsleyes-py37-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py313-wxpy4-gtk3
<<: *test_template
test:build-pypi-dist:
stage: test
image: python:3.10
<<: *test_rules
tags:
- docker
script:
- bash ./.ci/build_pypi_dist.sh
#############
# Style stage
#############
......@@ -173,33 +199,37 @@ test:3.7:
style:
stage: style
image: pauldmccarthy/fsleyes-py35-wxpy4-gtk2
image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
<<: *test_template
variables:
TEST_STYLE: "true"
###########
# Doc stage
###########
#############
# Pages stage
#############
# I would like to have separate doc deploys for
# both the main and latest release branches,
# but this is awkward with gitlab pages. So
# currently the most recently executed pages
# job is the one that gets deployed.
build-doc:
<<: *only_upstream
<<: *patch_version
pages:
<<: *only_release_branches
tags:
- docker
stage: doc
image: python:3.6
image: pauldmccarthy/fsleyes-py310-wxpy4-gtk3
script:
- bash ./.ci/build_doc.sh
artifacts:
expire_in: 1 day
paths:
- doc/$CI_COMMIT_REF_NAME
- public
#############
......@@ -209,10 +239,10 @@ build-doc:
build-pypi-dist:
<<: *only_releases
<<: *patch_version
<<: *check_version
stage: build
image: python:3.6
image: python:3.10
tags:
- docker
......@@ -231,29 +261,29 @@ build-pypi-dist:
##############
deploy-doc:
<<: *only_upstream
deploy-pypi:
<<: *only_releases
<<: *setup_ssh
stage: deploy
when: manual
image: python:3.6
image: python:3.10
tags:
- docker
dependencies:
- build-doc
- build-pypi-dist
script:
- bash ./.ci/deploy_doc.sh
- bash ./.ci/deploy_pypi.sh
deploy-pypi:
deploy-zenodo:
<<: *only_releases
<<: *setup_ssh
stage: deploy
when: manual
image: python:3.6
image: python:3.10
tags:
- docker
......@@ -262,4 +292,4 @@ deploy-pypi:
- build-pypi-dist
script:
- bash ./.ci/deploy_pypi.sh
- bash ./.ci/zenodo_deposit.sh "$ZENODO_URL" "$ZENODO_TOKEN" "$ZENODO_DEPOSIT_ID"
......@@ -2,4 +2,8 @@ Paul McCarthy <pauldmccarthy@gmail.com>
Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
Matthew Webster <matthew.webster@ndcn.ox.ac.uk>
Sean Fitzgibbon <sean.fitzgibbon@ndcn.ox.ac.uk>
Martin Craig <martin.craig@eng.ox.ac.uk>
\ No newline at end of file
Martin Craig <martin.craig@eng.ox.ac.uk>
Taylor Hanayik <taylor.hanayik@ndcn.ox.ac.uk>
Evan Edmond <evan.edmond@ndcn.ox.ac.uk>
Christoph Arthofer <christoph.arthofer@ndcn.ox.ac.uk>
Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
\ No newline at end of file
Copyright 2016-2018 University of Oxford, Oxford, UK
Copyright 2016-2023 University of Oxford, Oxford, UK
The fslpy library
Copyright 2016-2017 University of Oxford, Oxford, UK.
Copyright 2016-2023 University of Oxford, Oxford, UK.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
......
include LICENSE
include AUTHOR
include CHANGELOG.rst
include COPYRIGHT
include requirements.txt
include requirements-dev.txt
include requirements-extra.txt
include pytest.ini
recursive-include doc *
recursive-exclude doc/html *
recursive-include tests *
include LICENSE
include README.rst
include conftest.py
recursive-include doc *
recursive-include fsl/tests *
fslpy
=====
.. image:: https://git.fmrib.ox.ac.uk/fsl/fslpy/badges/master/build.svg
:target: https://git.fmrib.ox.ac.uk/fsl/fslpy/commits/master/
.. image:: https://git.fmrib.ox.ac.uk/fsl/fslpy/badges/master/coverage.svg
:target: https://git.fmrib.ox.ac.uk/fsl/fslpy/commits/master/
.. image:: https://img.shields.io/pypi/v/fslpy.svg
:target: https://pypi.python.org/pypi/fslpy/
.. image:: https://anaconda.org/conda-forge/fslpy/badges/version.svg
:target: https://anaconda.org/conda-forge/fslpy
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.1470750.svg
:target: https://doi.org/10.5281/zenodo.1470750
.. image:: https://git.fmrib.ox.ac.uk/fsl/fslpy/badges/master/coverage.svg
:target: https://git.fmrib.ox.ac.uk/fsl/fslpy/commits/master/
The ``fslpy`` project is a `FSL <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/>`_
programming library written in Python. It is used by `FSLeyes
<https://git.fmrib.ox.ac.uk/fsl/fsleyes/fsleyes/>`_.
``fslpy`` is tested against Python versions 3.5, 3.6 and 3.7.
``fslpy`` is tested against Python versions 3.10, 3.11, 3.12, and 3.13.
Installation
......@@ -41,11 +40,11 @@ Dependencies
------------
All of the core dependencies of ``fslpy`` are listed in the `requirements.txt
<requirements.txt>`_ file.
All of the core dependencies of ``fslpy`` are listed in the
`pyproject.toml <pyproject.toml>`_ file.
Some extra dependencies are listed in `requirements-extra.txt
<requirements-extra.txt>`_ which provide additional functionality:
Some optional dependencies (labelled ``extra`` in ``pyproject.toml``) provide
additional functionality:
- ``wxPython``: The `fsl.utils.idle <fsl/utils/idle.py>`_ module has
functionality to schedule functions on the ``wx`` idle loop.
......@@ -58,45 +57,56 @@ Some extra dependencies are listed in `requirements.txt
class has some methods which use ``trimesh`` to perform geometric queries
on the mesh.
- ``Pillow``: The `fsl.data.bitmap.Bitmap <fsl/data/bitmap.py>`_ class uses
``Pillow`` to load image files.
If you are using Linux, you need to install wxPython first, as binaries are
not available on PyPI. Change the URL for your specific platform::
not available on PyPI. Install wxPython like so, changing the URL for your
specific platform::
pip install -f https://extras.wxpython.org/wxPython4/extras/linux/gtk2/ubuntu-16.04/ wxpython
The ``rtree`` library also assumes that ``libspatialindex`` is installed on
your system.
Once wxPython has been installed, you can simply type the following to install
the rest of the extra dependencies::
Once wxPython has been installed, you can type the following to install the
remaining optional dependencies::
pip install fslpy[extras]
pip install "fslpy[extra]"
Dependencies for testing and documentation are listed in the
`requirements-dev.txt <requirements-dev.txt>`_ file.
Dependencies for testing and documentation are also listed in ``pyproject.toml``,
and are respectively labelled as ``test`` and ``doc``.
Non-Python dependencies
^^^^^^^^^^^^^^^^^^^^^^^
The ``fsl.data.dicom`` module requires the presence of Chris Rorden's
`dcm2niix <https://github.com/rordenlab/dcm2niix>`_ program.
The `fsl.data.dicom <fsl/data/dicom.py>`_ module requires the presence of
Chris Rorden's `dcm2niix <https://github.com/rordenlab/dcm2niix>`_ program.
The ``rtree`` library assumes that ``libspatialindex`` is installed on
your system.
The `fsl.transform.x5 <fsl/transform/x5.py>`_ module uses `h5py
<https://www.h5py.org/>`_, which requires ``libhdf5``.
Documentation
-------------
API documentation for ``fslpy`` is hosted at
https://open.win.ox.ac.uk/pages/fsl/fslpy/.
``fslpy`` is documented using `sphinx <http://sphinx-doc.org/>`_. You
can build the API documentation by running::
pip install -r requirements-dev.txt
python setup.py doc
pip install ".[doc]"
sphinx-build doc html
The HTML documentation will be generated and saved in the ``doc/html/``
The HTML documentation will be generated and saved in the ``html/``
directory.
......@@ -105,11 +115,15 @@ Tests
Run the test suite via::
pip install -r requirements-dev.txt
python setup.py test
pip install ".[test]"
pytest
A test report will be generated at ``report.html``, and a code coverage report
will be generated in ``htmlcov/``.
Some tests will only pass if the test environment meets certain criteria -
refer to the ``tool.pytest.ini_options`` section of `pyproject.toml
<pyproject.toml>`_ for a list of `pytest marks
<https://docs.pytest.org/en/7.1.x/example/markers.html>`_ which can be
selectively enabled or disabled.
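For example, to skip the tests tagged with the ``longtest`` mark (the mark
used by the CI scripts above; this is just an illustration - the full list of
marks lives in ``pyproject.toml``)::

    pytest -m "not longtest"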
Contributing
......
......@@ -14,12 +14,6 @@ import numpy as np
def pytest_addoption(parser):
    parser.addoption('--niters',
                     type=int,
                     action='store',
                     default=50,
                     help='Number of test iterations for imagewrapper')
    parser.addoption('--testdir',
                     action='store',
                     help='FSLeyes test data directory')
......@@ -29,11 +23,6 @@ def pytest_addoption(parser):
help='Seed for random number generator')
@pytest.fixture
def niters(request):
"""Number of test iterations."""
return request.config.getoption('--niters')
@pytest.fixture
def seed(request):
......
/* override table width restrictions */
.wy-table-responsive table td, .wy-table-responsive table th {
    white-space: normal;
}

.wy-table-responsive {
    margin-bottom: 24px;
    max-width: 100%;
    overflow: visible;
}
......@@ -12,12 +12,70 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import glob
import itertools as it
import os
import os.path as op
import sys
import datetime
date = datetime.date.today()
def check_for_missing_stubs():

    docdir  = op.dirname(__file__)
    basedir = op.join(docdir, '..')
    modules = []

    def tomodname(f):
        if f.endswith('.py'):
            f = f[:-3]
        return op.relpath(op.join(dirpath, f), basedir).replace(op.sep, '.')

    for dirpath, dirnames, filenames in os.walk(op.join(basedir, 'fsl')):
        for d in dirnames:
            if d == '__pycache__':
                continue
            if len(glob.glob(op.join(dirpath, d, '**', '*.py'), recursive=True)) == 0:
                continue
            modules.append(tomodname(d))
        for f in filenames:
            if not f.endswith('.py'):
                continue
            if f in ('__init__.py', '__main__.py'):
                continue
            modules.append(tomodname(f))

    modules = [m for m in modules if not m.startswith('fsl.tests')]

    # import fsl
    # modules = recurse(fsl)
    # modules = [m.name for m in modules]

    for mod in modules:
        docfile = op.join(docdir, f'{mod}.rst')
        if not op.exists(docfile):
            print(f'No doc file found for module: {mod}')

    for docfile in glob.glob(op.join(docdir, '*.rst')):
        docfile = op.relpath(docfile, basedir)
        mod     = op.splitext(op.basename(docfile))[0]
        if mod not in modules:
            print(f'No module found for doc file: {docfile}')


if __name__ == '__main__':
    check_for_missing_stubs()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
......@@ -33,7 +91,8 @@ date = datetime.date.today()
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.viewcode',
    'sphinx.ext.autosummary',
    'sphinx.ext.mathjax',
    'sphinx.ext.graphviz',
    'sphinx.ext.todo',
......@@ -55,13 +114,13 @@ master_doc = 'index'
# General information about the project.
project = u'fslpy'
copyright = u'{}, Paul McCarthy, University of Oxford, Oxford, UK'.format(
copyright = u'{}, FMRIB Centre, University of Oxford, Oxford, UK'.format(
    date.year)
# Links to other things
rst_epilog = """
.. |fsleyes_apidoc| replace:: FSLeyes
.. _fsleyes_apidoc: http://users.fmrib.ox.ac.uk/~paulmc/fsleyes_apidoc/index.html
.. _fsleyes_apidoc: http://users.fmrib.ox.ac.uk/~paulmc/fsleyes/userdoc/latest/index.html
"""
......@@ -121,6 +180,7 @@ pygments_style = 'sphinx'
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
......@@ -148,7 +208,11 @@ html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
html_static_path = ['_static']
html_css_files = [
    'theme_overrides.css',  # overrides for wide tables in RTD theme
]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
......@@ -353,26 +417,12 @@ epub_exclude_files = ['search.html']
# special-members flag)
autoclass_content = 'class'
# Document private members and special members (e.g. __init__)
autodocsourc_default_flags = ['private-members', 'special-members']
# Documentation for python modules is in the same order
# as the source code.
autodoc_member_order = 'bysource'
def autodoc_skip_member(app, what, name, obj, skip, options):
    # Do not document the _sync_* properties
    # that are added by the props package to
    # all SyncableHasProperties classes.
    if what == 'class':
        attName = name.split('.')[-1]
        return skip or attName.startswith('_sync_')
    return skip or False


def setup(app):
    app.connect('autodoc-skip-member', autodoc_skip_member)


autodoc_default_options = {
    'special-members' : True,
    'private-members' : True,
    'undoc-members'   : True,
    'member-order'    : 'bysource',
}
graphviz_output_format = 'svg'