Commit 8974379b authored by Fidel Alfaro Almagro

Correcting bugs related to IDP generation

parent 057aa33f
Pipeline #18019 passed
Showing 216 additions and 16 deletions
No preview for this file type. Source diff could not be displayed: it is stored in LFS.
@@ -19,8 +19,15 @@ log = logging.getLogger(__name__)
def run(ctx,
JHUrois_FA: In(optional=True),
JHUrois_MD: In(optional=True),
JHUrois_MO: In(optional=True),
JHUrois_L1: In(optional=True),
JHUrois_L2: In(optional=True),
JHUrois_L3: In(optional=True),
JHUrois_ICVF: In(optional=True),
JHUrois_OD: In(optional=True),
JHUrois_ISOVF: In(optional=True),
logs_dir: Ref,
JHUrois_prefix: Ref,
IDP_diff_TBSS: Out):
with redirect_logging(job_name(run), outdir=logs_dir):
@@ -29,9 +36,9 @@ def run(ctx,
result = ""
for mod in ["FA", "MD", "MO", "L1", "L2", "L3", "ICVF", "OD", "ISOVF"]:
file_name = JHUrois_prefix + mod + ".txt"
for file_name in [JHUrois_FA, JHUrois_MD, JHUrois_MO, JHUrois_L1,
JHUrois_L2, JHUrois_L3, JHUrois_ICVF, JHUrois_OD,
JHUrois_ISOVF]:
print(file_name)
if op.exists(file_name):
with open(file_name, "r", encoding="utf-8") as f:
......
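For reference, the corrected loop above reads each optional JHU-ROI text file directly instead of rebuilding names from JHUrois_prefix. A minimal self-contained sketch of that pattern follows; the helper name, its NaN fallback for missing files, and the 48-tract default are illustrative assumptions, not code from this commit:

import os.path as op

def collect_jhu_idps(file_names, num_tracts=48):
    """Concatenate per-modality JHU ROI values; pad missing files with NaNs."""
    values = []
    for file_name in file_names:
        if file_name is not None and op.exists(file_name):
            with open(file_name, "r", encoding="utf-8") as f:
                values.extend(f.read().strip().split())
        else:
            # Assumed fallback so the IDP columns for every modality stay aligned
            values.extend(["NaN"] * num_tracts)
    return " ".join(values)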
@@ -41,7 +41,7 @@ def run(ctx,
wrappers.fslmerge("t", autoPtx_all, *list(aptx_tract_tmp.data))
tractnorm = wrappers.fslstats(autoPtx_all, t=True).m.run()
for suffix in ['FA','L1','L2','L3','MO','MD','ICVF','OD','ISOVF']:
for suffix in ['FA','MD','MO','L1','L2','L3','ICVF','OD','ISOVF']:
all_file = TBSS_prefix + suffix + ".nii.gz"
APTX_file = aptx_txt_prefix + suffix + ".txt"
......
#!/usr/bin/env python
#
# QC_T1_FIRST_vols.py - Copying QC file with the volumes from FIRST of T1.
#
# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
# pylint: disable=W0613
#
from shutil import copyfile
from pipe_tree import In, Out
def run(ctx,
IDP_T1_FIRST_vols: In,
QC_T1_FIRST_vols: Out):
copyfile(src=IDP_T1_FIRST_vols, dst=QC_T1_FIRST_vols)
#!/usr/bin/env python
#
# QC_T1_SIENAX.py - Copying QC file with SIENAXvalues of T1.
#
# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
# pylint: disable=W0613
#
from shutil import copyfile
from pipe_tree import In, Out
def run(ctx,
IDP_T1_SIENAX: In,
QC_T1_SIENAX: Out):
copyfile(src=IDP_T1_SIENAX, dst=QC_T1_SIENAX)
#!/usr/bin/env python
#
# QC_T1_align_to_std.py - Copying QC file with alignment to std metrics.
#
# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
# pylint: disable=W0613
#
from shutil import copyfile
from pipe_tree import In, Out
def run(ctx,
IDP_T1_align_to_std: In,
QC_T1_align_to_std: Out):
copyfile(src=IDP_T1_align_to_std, dst=QC_T1_align_to_std)
#!/usr/bin/env python
#
# QC_T1_generator.py - Generating the combined QC file with T1 QC metrics
#
# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
#
import os.path as op
import json
import logging
from pipe_tree import In, Out, Ref
from bip.utils.log_utils import redirect_logging, job_name
log = logging.getLogger(__name__)
def run(ctx,
QC_T1_align_to_std: In(optional=True),
QC_T1_noise_ratio: In(optional=True),
QC_T1_SIENAX: In(optional=True),
QC_T1_FIRST_vols: In(optional=True),
logs_dir: Ref,
QC_T1: Out):
with redirect_logging(job_name(run), outdir=logs_dir):
result = ""
QC_json_file = ctx.get_data("QC/QC_T1.json")
with open(QC_json_file, "r", encoding="utf-8") as f:
QC_dict = json.load(f)
for QC_file in [QC_T1_align_to_std, QC_T1_noise_ratio, QC_T1_SIENAX,
QC_T1_FIRST_vols]:
plain_name = op.basename(QC_file).replace(".txt", "")
num_QC = len(QC_dict[plain_name])
result_nans = ("NaN " * num_QC).strip()
if QC_file is not None and op.exists(QC_file):
with open(QC_file, "r", encoding="utf-8") as f:
QC_l = f.read().strip().split()
if len(QC_l) != num_QC:
result += " " + result_nans
else:
result += " " + " ".join(QC_l)
else:
result += " " + result_nans
result = result.replace("  ", " ").strip()
print(result)
with open(QC_T1, 'wt', encoding="utf-8") as f:
f.write(f'{result}\n')
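The contents of QC/QC_T1.json are not shown in this commit; assuming it maps each QC file's base name to the list of metrics expected in that file, the NaN-padding logic above behaves like the following sketch (metric names are hypothetical):

import os.path as op

# Hypothetical example of the assumed QC/QC_T1.json structure
QC_dict = {
    "QC_T1_align_to_std": ["align_cost", "warp_norm"],
    "QC_T1_noise_ratio":  ["noise_ratio"],
}

def qc_values_or_nans(QC_file, num_QC):
    """Return the file's values, or one NaN per expected metric if the file
    is missing or holds an unexpected number of entries."""
    if QC_file is not None and op.exists(QC_file):
        with open(QC_file, "r", encoding="utf-8") as f:
            QC_l = f.read().strip().split()
        if len(QC_l) == num_QC:
            return " ".join(QC_l)
    return " ".join(["NaN"] * num_QC)

# With no QC_T1_align_to_std.txt on disk this prints "NaN NaN"
print(qc_values_or_nans("QC_T1_align_to_std.txt", len(QC_dict["QC_T1_align_to_std"])))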
#!/usr/bin/env python
#
# QC_T1_noise_ratio.py - Copying QC file with noise ratio of T1.
#
# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
# pylint: disable=W0613
#
from shutil import copyfile
from pipe_tree import In, Out
def run(ctx,
IDP_T1_noise_ratio: In,
QC_T1_noise_ratio: Out):
copyfile(src=IDP_T1_noise_ratio, dst=QC_T1_noise_ratio)
#!/usr/bin/env python
#
# struct_T1.py - Pipeline with the T1w QC generation.
#
# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
#
# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
# pylint: disable=W0613,R1735
#
import logging
from bip.utils.log_utils import job_name
from bip.pipelines.QC_gen.T1 import QC_T1_align_to_std
from bip.pipelines.QC_gen.T1 import QC_T1_noise_ratio
from bip.pipelines.QC_gen.T1 import QC_T1_SIENAX
from bip.pipelines.QC_gen.T1 import QC_T1_FIRST_vols
from bip.pipelines.QC_gen.T1 import QC_T1_generator
log = logging.getLogger(__name__)
def add_to_pipeline(ctx, pipe, tree):
subj = ctx.subject
pipe(QC_T1_align_to_std.run,
submit=dict(jobtime=200, name=job_name(QC_T1_align_to_std.run, subj)),
kwargs={'ctx' : ctx})
pipe(QC_T1_noise_ratio.run,
submit=dict(jobtime=200, name=job_name(QC_T1_noise_ratio.run, subj)),
kwargs={'ctx' : ctx})
pipe(QC_T1_SIENAX.run,
submit=dict(jobtime=200, name=job_name(QC_T1_SIENAX.run, subj)),
kwargs={'ctx' : ctx})
pipe(QC_T1_FIRST_vols.run,
submit=dict(jobtime=200, name=job_name(QC_T1_FIRST_vols.run, subj)),
kwargs={'ctx' : ctx})
pipe(QC_T1_generator.run,
submit=dict(jobtime=200, name=job_name(QC_T1_generator.run, subj)),
kwargs={'ctx' : ctx})
@@ -36,8 +36,7 @@ def run(ctx,
bedpostx_nodif_brain: Out,
bedpostx_nodif_brain_mask: Out,
bedpostx_nvox: Out,
bedpostx_eye: Out,
bp_logs_dir: Out):
bedpostx_eye: Out):
with redirect_logging(job_name(run), outdir=logs_dir):
......
@@ -48,10 +48,10 @@ def run(ctx,
# In case the kernels were already calculated for this study:
# TODO: Verify that this is regular UKB data.
# Otherwise, this should not be done
if not op.exists(op.join(os.getcwd(), 'kernels/')):
if not op.exists(op.join(os.getcwd(), 'kernels')):
with lockdir(dirname=os.getcwd(), delay=10):
# 2 checks will be needed
if not op.exists(op.join(os.getcwd(), 'kernels/')):
if not op.exists(op.join(os.getcwd(), 'kernels')):
if op.exists(ctx.get_data('dMRI/kernels.zip')):
with zipfile.ZipFile(ctx.get_data('dMRI/kernels.zip'),
'r') as zip_ref:
......
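The hunk above drops the trailing slash so op.exists matches the kernels directory reliably, and keeps the check-lock-recheck structure so only one concurrent job extracts the shared kernels. A sketch of that pattern under the same assumptions (lockdir is the pipeline's directory-lock context manager, passed in here because its import path is not shown in this diff; extracting into the working directory is also an assumption):

import os
import os.path as op
import zipfile

def ensure_kernels(kernels_zip, lockdir):
    kernels_dir = op.join(os.getcwd(), 'kernels')
    if op.exists(kernels_dir):                 # fast path: already extracted
        return
    with lockdir(dirname=os.getcwd(), delay=10):
        # Re-check inside the lock: another job may have extracted the kernels
        # while this one was waiting.
        if op.exists(kernels_dir):
            return
        if op.exists(kernels_zip):
            with zipfile.ZipFile(kernels_zip, 'r') as zip_ref:
                zip_ref.extractall(os.getcwd())  # assumed extraction target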
@@ -25,6 +25,8 @@ log = logging.getLogger(__name__)
def run(ctx,
FA: In,
ISOVF: In(optional=True),
OD: In(optional=True),
ICVF: In(optional=True),
logs_dir: Ref,
tmp_dir: Ref,
TBSS_MNI_to_dti_FA_warp_msf: Ref,
@@ -51,7 +53,15 @@ def run(ctx,
TBSS_mean_FA_skeleton: Out,
TBSS_mean_FA_skeleton_mask: Out,
TBSS_all_FA_skeletonised: Out,
JHUrois_FA: Out):
JHUrois_FA: Out,
JHUrois_MD: Out,
JHUrois_MO: Out,
JHUrois_L1: Out,
JHUrois_L2: Out,
JHUrois_L3: Out,
JHUrois_ICVF: Out,
JHUrois_OD: Out,
JHUrois_ISOVF: Out):
with redirect_logging(job_name(run), outdir=logs_dir),\
tempdir(op.join(tmp_dir, job_name(run))) as tmp_dir:
@@ -148,7 +158,7 @@ def run(ctx,
f.write(f'{mean}')
# Applying to the outputs of both dtifit and noddi
dict_suffix = {dtifit_prefix: ['L1', 'L2', 'L3', 'MO', 'MD'],
dict_suffix = {dtifit_prefix: ['MO', 'MD', 'L1', 'L2', 'L3'],
NODDI_prefix: ['ICVF', 'OD', 'ISOVF']}
for prefix in dict_suffix:
@@ -169,3 +179,7 @@ def run(ctx,
mean = wrappers.fslstats(d_output_skel, K=atlas).M.run()
np.savetxt(d_output_txt, mean, fmt="%0.8f")
else:
num_tracts = ctx.get('dMRI_TBSS_num_tracts', 48)
str_NaNs = ["NaN" for _ in range(num_tracts)]
print("[" + " ".join(str_NaNs) + "]\n")
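The new else branch only prints the NaN placeholder. If the intent is also to write it to the corresponding JHUrois_* text output so downstream IDP extraction stays aligned (an assumption, since the rest of the hunk is not shown), the body of that branch could continue along these lines, reusing ctx and d_output_txt from the surrounding code:

num_tracts = ctx.get('dMRI_TBSS_num_tracts', 48)
nan_row = " ".join(["NaN"] * num_tracts)
with open(d_output_txt, "wt", encoding="utf-8") as f:  # d_output_txt as used above
    f.write(nan_row + "\n")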
@@ -26,7 +26,7 @@ def run(ctx,
FreeSurfer_dir: In,
logs_dir: Ref,
FS_IDPs: Out,
FS_headers_info: Out):
FS_headers_info: Ref):
with redirect_logging(job_name(run), outdir=logs_dir):
......