diff --git a/bip/VERSION b/bip/VERSION new file mode 100644 index 0000000000000000000000000000000000000000..468437494697b8d457e2b251edb3e76047d0321b --- /dev/null +++ b/bip/VERSION @@ -0,0 +1 @@ +1.8 \ No newline at end of file diff --git a/bip/__init__.py b/bip/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..83b51e1ed45def769589beb2535ea4606e3f10d7 --- /dev/null +++ b/bip/__init__.py @@ -0,0 +1,4 @@ +import os.path as op + +basedir = op.dirname(__file__) +__version__ = open(op.join(basedir, 'VERSION')).read() diff --git a/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py b/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py index a31994db9cc8b018ef19e33ba3cbc2051fb0d559..d0b6a7f12f6b6160ef886ece505f1e6956b8d42c 100755 --- a/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py +++ b/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py @@ -14,7 +14,7 @@ import os import logging import nibabel as nib from fsl import wrappers -from pipe_tree import In, Out, Ref +from pipe_tree import In, Out, Ref, Var from bip.utils.log_utils import redirect_logging, tempdir log = logging.getLogger(__name__) @@ -24,7 +24,8 @@ def run(ctx, TBSS_prefix: Ref, aptx_txt_prefix: Ref, tmp_dir: Ref, - aptx_tract_tmp: In(no_iter=True), + aptx_tract_tmp: In, + autoptx_tract: Var(no_iter=True), IDP_diff_autoPtx: Out): with redirect_logging('IDP_diff_autoPtx', outdir=logs_dir),\ @@ -37,7 +38,7 @@ def run(ctx, result = "" # TODO: See if it is possible to generate the aptx_tract_tmp files here - wrappers.fslmerge(autoPtx_all, *list(aptx_tract_tmp.data)) + wrappers.fslmerge("t", autoPtx_all, *list(aptx_tract_tmp.data)) tractnorm = wrappers.fslstats(autoPtx_all, t=True).m.run() for suffix in ['FA','L1','L2','L3','MO','MD','ICVF','OD','ISOVF']: @@ -59,7 +60,7 @@ def run(ctx, with open(APTX_file, 'wt', encoding="utf-8") as f: f.write(f'{cad}\n') - result += cad + result += " " + cad else: result += " " + result_NaN diff --git a/bip/pipelines/IDPs_gen/IDP_func_task_activation.py b/bip/pipelines/IDPs_gen/IDP_func_task_activation.py index 27da52e57324f75f99a7b97823506aadd0004405..d56aab7ca067ac5241ee7eeaec7b492f640ffbfd 100755 --- a/bip/pipelines/IDPs_gen/IDP_func_task_activation.py +++ b/bip/pipelines/IDPs_gen/IDP_func_task_activation.py @@ -76,20 +76,20 @@ def run(ctx, N_tfMRI_featquery_5a_dir = tfMRI_featquery_5a_dir.replace(tfMRI_feat,"") - wrappers.featquery(N_featdirs="1", featdir1=tfMRI_feat, N_stats="2", - stats1=[N_tfMRI_cope1, N_tfMRI_zstat1], + wrappers.featquery(featdirs=[tfMRI_feat], + stats=[N_tfMRI_cope1, N_tfMRI_zstat1], outputRootName=N_tfMRI_featquery_1_dir, mask=group_mask_1) - wrappers.featquery(N_featdirs="1", featdir1=tfMRI_feat, N_stats="2", - stats1=[N_tfMRI_cope2, N_tfMRI_zstat2], + wrappers.featquery(featdirs=[tfMRI_feat], + stats=[N_tfMRI_cope2, N_tfMRI_zstat2], outputRootName=N_tfMRI_featquery_2_dir, mask=group_mask_2) - wrappers.featquery(N_featdirs="1", featdir1=tfMRI_feat, N_stats="2", - stats1=[N_tfMRI_cope5, N_tfMRI_zstat5], + wrappers.featquery(featdirs=[tfMRI_feat], + stats=[N_tfMRI_cope5, N_tfMRI_zstat5], outputRootName=N_tfMRI_featquery_5_dir, mask=group_mask_5) - wrappers.featquery(N_featdirs="1", featdir1=tfMRI_feat, N_stats="2", - stats1=[N_tfMRI_cope5, N_tfMRI_zstat5], + wrappers.featquery(featdirs=[tfMRI_feat], + stats=[N_tfMRI_cope5, N_tfMRI_zstat5], outputRootName=N_tfMRI_featquery_5a_dir, mask=group_mask_5a) diff --git a/bip/pipelines/IDPs_gen/IDPs_gen.py b/bip/pipelines/IDPs_gen/IDPs_gen.py index ae8d60abbd87b750998ad00b224f55046c65aba8..52e4e13174e3f36d8d6061bf4be90aa29178e760 100755 
--- a/bip/pipelines/IDPs_gen/IDPs_gen.py +++ b/bip/pipelines/IDPs_gen/IDPs_gen.py @@ -27,7 +27,7 @@ from bip.pipelines.IDPs_gen import IDP_func_TSNR from bip.pipelines.IDPs_gen import IDP_func_task_activation from bip.pipelines.IDPs_gen import IDP_diff_eddy_outliers from bip.pipelines.IDPs_gen import IDP_diff_TBSS -#from bip.pipelines.IDPs_gen import IDP_diff_autoPtx +from bip.pipelines.IDPs_gen import IDP_diff_autoPtx from bip.pipelines.IDPs_gen import IDPs_generator log = logging.getLogger(__name__) @@ -109,10 +109,10 @@ def add_to_pipeline(ctx, pipe, tree, targets): kwargs={'ctx' : ctx}) targets.append('IDP_diff_TBSS') - #pipe(IDP_diff_autoPtx.run, - # submit=dict(jobtime=200, name="BIP_IDP_diff_autoPtx_"+ subj), - # kwargs={'ctx' : ctx}) - #targets.append('IDP_diff_autoPtx') + pipe(IDP_diff_autoPtx.run, + submit=dict(jobtime=200, name="BIP_IDP_diff_autoPtx_"+ subj), + kwargs={'ctx' : ctx}) + targets.append('IDP_diff_autoPtx') pipe(IDP_subject_COG_table.run, submit=dict(jobtime=200, name="BIP_IDP_subject_COG_table_" + subj), diff --git a/bip/pipelines/dMRI_diff/diff_eddy.py b/bip/pipelines/dMRI_diff/diff_eddy.py index 9baeee980e10e7fba8bcd15b018f94f4356a8ea7..a4d286a22eef1c02dd90352c411d1d9c57c3b02c 100755 --- a/bip/pipelines/dMRI_diff/diff_eddy.py +++ b/bip/pipelines/dMRI_diff/diff_eddy.py @@ -42,7 +42,8 @@ def run(ctx, eddy_data: Out, eddy_outlier_report: Out): - with redirect_logging('diff_eddy', outdir=logs_dir): + #TODO: Use __name__ as the redirect_logging name in every pipeline stage + with redirect_logging(__name__, outdir=logs_dir): # Creates links # TODO: These links are NOT relative. This may cause future problems. @@ -55,7 +56,6 @@ def run(ctx, if not os.path.exists(eddy_nodif_brain_mask_ud): os.symlink(src="../../../" + fieldmap_mask_ud, dst=eddy_nodif_brain_mask_ud) - # Generation of FSF file copyfile(src=AP_bval, dst=eddy_bvals) copyfile(src=AP_bvec, dst=eddy_bvecs) diff --git a/bip/pipelines/dMRI_diff/diff_tbss.py b/bip/pipelines/dMRI_diff/diff_tbss.py index 82dfce2270664e1eeb1faf53517f163ad1097dd0..ce77c626a5a2dd9bc3f1ca70ebad911c5c2407e3 100755 --- a/bip/pipelines/dMRI_diff/diff_tbss.py +++ b/bip/pipelines/dMRI_diff/diff_tbss.py @@ -59,7 +59,7 @@ def run(ctx, TBSS_FA_to_MNI_warp_s2 = tmp_dir + '/FA_to_MNI_warp_s2.nii.gz' # Creates links - # TODO: These links are NOT relative. This may cause future problems.
+ # TODO: Use os.path.relpath to make these links relative if not os.path.exists(TBSS_FA): os.symlink(src="../../../" + FA, dst=TBSS_FA) if not os.path.exists(TBSS_MNI): @@ -159,5 +159,4 @@ def run(ctx, wrappers.fslmaths(d_output).mas(TBSS_mean_FA_skeleton_mask).run(d_output_skel) mean = wrappers.fslstats( d_output_skel, K=atlas).M.run() - with open(d_output_txt, 'wt', encoding="utf-8") as f: - f.write(f'{mean}') + np.savetxt(d_output_txt, mean, fmt="%0.8f") diff --git a/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py b/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py index cef087bd17c1fdb79bd97dec3ea55ebfcd2456fb..745eb55bb8c84cc1c333342705c787f875855f9a 100755 --- a/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py +++ b/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py @@ -75,7 +75,7 @@ def run(ctx, x1,y1,z1 = np.where(B0_AP_corr_tmp_imgf==0) x2,y2,z2 = np.where(B0_PA_corr_tmp_imgf==0) - #For AP voxels with 0 value,, get the values in PA (And viceversa) + #For AP voxels with 0 value, get the values in PA (and vice versa) B0_AP_fixed_tmp_imgf = B0_AP_corr_tmp_imgf B0_PA_fixed_tmp_imgf = B0_PA_corr_tmp_imgf B0_AP_fixed_tmp_imgf[x1,y1,z1] = B0_PA_corr_tmp_imgf[x1,y1,z1] diff --git a/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py b/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py index f6b2084919bf80dd3634d51e9a35fe5cbe612a57..0473ec10efd07bc6855239c427f86c1032c3bdc3 100755 --- a/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py +++ b/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py @@ -69,9 +69,11 @@ def choose_best_B0(ctx, img, bval, total_B0, indices, tmp, best_index, B0, scores[i,j] = corr scores[j,i] = corr - final_scores = np.zeros(N) - for i in range(N): - final_scores[i]=np.sum(scores[:,i]) / (N - 1) + # TODO: Do in one line + #final_scores = np.zeros(N) + #for i in range(N): + # final_scores[i]=np.sum(scores[:,i]) / (N - 1) + final_scores = np.sum(scores, axis=1) / (N - 1) best_ind = final_scores.argmax() best_score = final_scores[best_ind] @@ -201,3 +203,5 @@ def run(ctx, if (Zslices % 2) != 0: wrappers.fslroi(B0_AP_PA, B0_AP_PA, 0, -1, 0, -1, 0, Zslices -1) + # TODO: Discuss this with Jesper.
+ diff --git a/bip/pipelines/struct_FS/FS_get_IDPs.py b/bip/pipelines/struct_FS/FS_get_IDPs.py index df9944d0b2196fadb334dd469eb2e4e47ef94c2a..3a14ccf4e66f4078c34c6066b3c94741aa8741c8 100755 --- a/bip/pipelines/struct_FS/FS_get_IDPs.py +++ b/bip/pipelines/struct_FS/FS_get_IDPs.py @@ -517,6 +517,7 @@ def bb_FS_get_IDPs(ctx, env): dataDir = subjectDir + '/data/' headersDir = subjectDir + '/headers/' + #TODO: Raise an exception if not os.path.isdir(subjectDir): print("Error: FreeSurfer has not been run on this subject") sys.exit(-1) diff --git a/bip/pipelines/struct_FS/FS_proc.py b/bip/pipelines/struct_FS/FS_proc.py index d45c0485b94c5fced7a150c8b0f9b3e8476dd72a..a4bb840b7b3d0993a7d37c1fab21f36afe86f9de 100755 --- a/bip/pipelines/struct_FS/FS_proc.py +++ b/bip/pipelines/struct_FS/FS_proc.py @@ -32,6 +32,7 @@ def run(ctx, if os.path.exists(FreeSurfer_dir): shutil.rmtree(FreeSurfer_dir) + #TODO: SUBJECTS_DIR can be passed as an argument to recon-all instead of this hack env = dict(os.environ, SUBJECTS_DIR=os.getcwd() + "/" + ctx.subject) T1_path = os.getcwd() + "/" + T1_unbiased @@ -39,16 +40,13 @@ def run(ctx, cmd = 'recon-all -all -s FreeSurfer -i ' + T1_path - if os.path.exists(T2_FLAIR_unbiased): + #TODO: Do this for all cases of os.path.exists (all optional inputs) + if T2_FLAIR_unbiased is not None and os.path.exists(T2_FLAIR_unbiased): cmd += " -FLAIR " + T2_path + " -FLAIRpial" - output = run_command(log, cmd, env) - - log.info(output) - - if os.path.exists(fsaverage): - os.unlink(fsaverage) - - #ext_wrappers.recon_all(subjects_dir=os.getcwd(), - # directive="all", subjid="FreeSurfer", - # infile=T1_unbiased, FLAIR=opt_T2_FLAIR) + try: + output = run_command(log, cmd, env=env) + log.info(output) + finally: + if os.path.exists(fsaverage): + os.unlink(fsaverage) diff --git a/bip/pipelines/struct_T1/T1_first.py b/bip/pipelines/struct_T1/T1_first.py index 8b1ca75033689c234d1adf751c2983e66375d886..71f15d69f89a704b878513a3052c46ae82749b17 100755 --- a/bip/pipelines/struct_T1/T1_first.py +++ b/bip/pipelines/struct_T1/T1_first.py @@ -37,6 +37,8 @@ def run(ctx, output=T1_first_prefix, b=True) for f in glob.glob(T1_first_prefix + "-*_first*.nii.gz"): - os.remove(f) + if os.path.exists(f): + os.remove(f) for f in glob.glob(T1_first_prefix + "-*_corr*.nii.gz"): - os.remove(f) + if os.path.exists(f): + os.remove(f) diff --git a/bip/utils/log_utils.py b/bip/utils/log_utils.py index 2666ef006fa19a4423ba74879d6dbbd216bdafed..1abf843c9848cdf7274fd57e55b8ea1b79e4a720 100755 --- a/bip/utils/log_utils.py +++ b/bip/utils/log_utils.py @@ -22,13 +22,13 @@ from subprocess import check_output from fsl import wrappers -#def run_command(logger, command, **kwargs): This fails -def run_command(logger, command, env): +def run_command(logger, command, **kwargs): +#def run_command(logger, command, env): try: logger.info(command.strip()) # This fails - #job_output = check_output(command,shell=True,**kwargs).decode('UTF-8') - job_output = check_output(command,shell=True,env=env).decode('UTF-8') + job_output = check_output(command,shell=True,**kwargs).decode('UTF-8') + #job_output = check_output(command,shell=True,env=env).decode('UTF-8') logger.info('Output: \t' + job_output.strip()) except Exception as e: @@ -36,7 +36,7 @@ def run_command(logger, command, env): logger.error('Exception type: \t' + str(type(e))) logger.error('Exception args: \t' + str(e.args)) logger.error('Exception message: \t' + str(e)) - job_output = "" + raise return job_output.strip() @@ -136,14 +136,11 @@ def redirect_logging(name, outdir='.',
level=logging.INFO, verbose=True): if getattr(log, 'handler', None): log.addHandler(log.handler) - @contextlib.contextmanager def tempdir(dir_name): + ''' This removes the temp directory only if the code inside the with + block succeeds. + ''' os.makedirs(dir_name, exist_ok=True) yield dir_name - for file_name in os.listdir(dir_name): - file_path = os.path.join(dir_name, file_name) - try: - shutil.rmtree(file_path) - except OSError: - os.remove(file_path) + shutil.rmtree(dir_name) diff --git a/setup.py b/setup.py index 8bc548eac6e1396aaf98513c0f1e49b9e5af8fb3..1cd3ee64cc721c3786cc5dc8b2b90d484cb11025 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,14 @@ +import os.path as op from setuptools import setup,find_packages + with open('requirements.txt', 'rt') as f: install_requires = [l.strip() for l in f.readlines()] +basedir = op.dirname(__file__) +version = open(op.join(basedir, 'bip', 'VERSION')).read().strip() + setup(name='bip', - version='1.0.0', + version=version, description='Brain Imaging Pipeline', author='Fidel Alfaro Almagro', install_requires=install_requires,