diff --git a/.gitignore b/.gitignore
index 3fe5323ae0f08e6ac7a4741d9d168358822a545a..365008ab678f6d9b5d619f708cabea6247c8a3f3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,8 @@ __pycache__/
 
 # Distribution / packaging
 .Python
+TODOs
+PendingTasks
 build/
 obsolete/
 structure
diff --git a/bip/ext_wrappers/__init__.py b/bip/ext_wrappers/__init__.py
deleted file mode 100755
index 685f904fbf475979d62409f401c445ca7b21c06c..0000000000000000000000000000000000000000
--- a/bip/ext_wrappers/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-#
-# pylint: disable=unused-import
-# flake8: noqa: F401
-#
-# __init__.py - Wrappers for external command-line tools.
-#
-# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
-#
-"""This package contains wrappers for various command line tools, allowing
-them to be called from Python.
-"""
-
-from bip.ext_wrappers.freesurfer import (recon_all,
-                                         asegstats2table,
-                                         aparcstats2table)
diff --git a/bip/ext_wrappers/freesurfer.py b/bip/ext_wrappers/freesurfer.py
deleted file mode 100644
index 8cfb1a883ad638cb3d70eb7c53106c01ddd63f8f..0000000000000000000000000000000000000000
--- a/bip/ext_wrappers/freesurfer.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# freesurfer.py - Wrappers for FreeSurfer and its sub-functions
-#
-# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
-#
-"""This module provides wrapper functions for FreeSurfer
-
-"""
-
-import os
-import fsl.utils.assertions as asrt
-from fsl.wrappers import wrapperutils as wutils
-
-
-import sys
-
-def eprint(*args, **kwargs):
-    print(*args, file=sys.stderr, **kwargs)
-
-
-@wutils.fileOrImage('infile')
-@wutils.cmdwrapper
-def recon_all(subjects_dir, directive, subjid, infile, FLAIR=None, **kwargs):
-    """Wrapper for the ``recon-all`` command.
-    """
-    #asrt.assertIsNifti(*infile)
-
-    os.environ["SUBJECTS_DIR"] = subjects_dir
-
-    cmd = ['recon-all', "-" + directive + " -s " + subjid]
-    cmd += [" -i " + subjects_dir + "/" + infile]
-
-    if FLAIR:
-        cmd += [" -FLAIR " + subjects_dir + "/" + FLAIR + " -FLAIRpial -debug"]
-
-    return cmd
-
-@wutils.cmdwrapper
-def asegstats2table(subjects_dir, mask, tablefile, subjects, **kwargs):
-    """Wrapper for the ``asegstats2table`` command.
-    """
-    os.environ["SUBJECTS_DIR"] = subjects_dir
-    cmd = ['asegstats2table']
-
-    return cmd
-
-@wutils.cmdwrapper
-def aparcstats2table(subjects_dir, mask, tablefile, subjects, hemi, atlas,
-                     **kwargs):
-    """Wrapper for the ``aparcstats2table`` command.
-    """
-    os.environ["SUBJECTS_DIR"] = subjects_dir
-    cmd = ['aparcstats2table']
-
-    return cmd
diff --git a/bip/main.py b/bip/main.py
index 55ccda19aa978e844f81a8dc3510c33c2a083069..ab6ef498f3e84a658386b7d34db30310040d642c 100755
--- a/bip/main.py
+++ b/bip/main.py
@@ -7,7 +7,7 @@
  '''
 
 import os
-import os.path
+import os.path as op
 import sys
 import logging
 import argparse
@@ -42,7 +42,7 @@ class Context:
     FSLDIR:     str = os.environ['FSLDIR']
     gdc:        str = field(init=False)
 
-    with open(BB_BIN_DIR + '/data/dMRI/autoptx/struct_list.json', 'r') as f:
+    with open(op.join(BB_BIN_DIR, 'data', 'dMRI', 'autoptx', 'struct_list.json'), 'r') as f:
         tract_struct = json.load(f)
 
     def __post_init__(self):
@@ -50,20 +50,26 @@ class Context:
 
     @property
     def MNI(self):
-        return self.FSLDIR + '/data/standard/MNI152_T1_1mm'
+        return self.get_standard('MNI152_T1_1mm')
 
     @property
     def MNI_brain_mask(self):
-        return self.FSLDIR + '/data/standard/MNI152_T1_1mm_brain_mask'
+        return self.get_standard('MNI152_T1_1mm_brain_mask')
 
     def get_standard(self, fileName):
-        return self.FSLDIR + '/data/standard/' + fileName
+        fileName = fileName.replace('/', os.sep)
+        basedir = self.FSLDIR + os.sep + op.join('data', 'standard') + os.sep
+        return basedir + fileName
 
     def get_atlas(self, fileName):
-        return self.FSLDIR + '/data/atlases/' + fileName
+        fileName = fileName.replace('/', os.sep)
+        basedir = self.FSLDIR + os.sep + op.join('data', 'atlases') + os.sep
+        return basedir + fileName
 
     def get_data(self, fileName):
-        return self.BB_BIN_DIR + '/data/' + fileName
+        fileName = fileName.replace('/', os.sep)
+        basedir = self.BB_BIN_DIR + os.sep + 'data' + os.sep
+        return basedir + fileName
 
     #def save_context(self, file_name):
     #    with open(file_name, "wt") as config_file:
@@ -160,7 +166,7 @@ def parseArguments(ctx):
     if coeff == "":
         coeff = "skyra"
     if coeff not in ["skyra", "prisma", "none"]:
-        if not os.path.exists(coeff):
+        if not op.exists(coeff):
             log.error("ERROR: Subject cannot be run. Incorrect GDC file specified: " +
                         coeff)
             sys.exit(1)
@@ -170,7 +176,7 @@ def parseArguments(ctx):
         coeff = ctx.get_data("GDC/bb_GDC_coeff_skyra.grad")
     elif coeff == "prisma":
         coeff = ctx.get_data("GDC/bb_GDC_coeff_prisma.grad")
-    if not os.path.exists(coeff):
+    if not op.exists(coeff):
         coeff = "none"
 
     # Parsing number of SWI coils argument
@@ -247,7 +253,7 @@ def parseArguments(ctx):
     naming_patterns = argsa.naming_patterns
     naming_patterns = naming_patterns.strip()
 
-    if not os.path.exists(naming_patterns):
+    if not op.exists(naming_patterns):
         log.error("ERROR: Subject cannot be run. Incorrect naming pattern file specified: " +
                      naming_patterns)
         sys.exit(1)
@@ -258,23 +264,23 @@ def parseArguments(ctx):
     B_files = B_files.strip()
 
     if B_files != "":
-        if not os.path.exists(B_files):
+        if not op.exists(B_files):
             log.error("ERROR: Subject cannot be run. Incorrect B-files directory specified: " +
                          B_files)
             sys.exit(1)
         else:
             for fil in ["AP.bval", "AP.bvec" , "PA.bval", "PA.bvec"]:
-                if not os.path.exists(B_files + '/' + fil ):
+                if not op.exists(op.join(B_files, fil)):
                     log.error("ERROR: Subject cannot be run. Non-existent B-file: " +
-                                 B_files + '/' + fil )
+                                 op.join(B_files, fil) )
                     sys.exit(1)
-        if not os.path.isabs(B_files):
-            B_files = os.path.abspath(B_files)
+        if not op.isabs(B_files):
+            B_files = op.abspath(B_files)
 
     # Parsing basic QC file argument
     basic_QC_file = argsa.basic_QC_file
     basic_QC_file = basic_QC_file.strip()
-    if not os.path.exists(basic_QC_file):
+    if not op.exists(basic_QC_file):
         log.error("ERROR: Subject cannot be run. Incorrect basic QC file specified: " +
                      basic_QC_file)
         sys.exit(1)
@@ -312,7 +318,7 @@ def main():
 
     parseArguments(ctx)
 
-    setup_logging(ctx.subject + '/logs/main.log')
+    setup_logging(op.join(ctx.subject, 'logs', 'main.log'))
 
     tree = FileTree.read(ctx.get_data('FileTree'), 
                          subject=ctx.subject,
diff --git a/bip/pipelines/IDPs_gen/IDP_SWI_T2star.py b/bip/pipelines/IDPs_gen/IDP_SWI_T2star.py
index 4226a1e1818d3a9d8af4d170e0b96fb7f6909e6d..910599531a2446157172fd92ca2a2b1465620529 100755
--- a/bip/pipelines/IDPs_gen/IDP_SWI_T2star.py
+++ b/bip/pipelines/IDPs_gen/IDP_SWI_T2star.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -28,7 +29,7 @@ def run(ctx,
 
         result = ("NaN " * 14).strip()
 
-        if os.path.exists(T1_first_all_fast_firstseg):
+        if op.exists(T1_first_all_fast_firstseg):
             v=wrappers.fslstats(T2star_to_T1, K=T1_first_all_fast_firstseg).p(50).run()
 
             # Check that the outputs are OK
diff --git a/bip/pipelines/IDPs_gen/IDP_T1_FIRST_vols.py b/bip/pipelines/IDPs_gen/IDP_T1_FIRST_vols.py
index 28d1fa2318836e91d8c94d541b5d3c87a438a808..3aed0f4ea9919bf7c5d6446cd396f95f43500471 100755
--- a/bip/pipelines/IDPs_gen/IDP_T1_FIRST_vols.py
+++ b/bip/pipelines/IDPs_gen/IDP_T1_FIRST_vols.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -26,7 +27,7 @@ def run(ctx,
     with redirect_logging(job_name(run), outdir=logs_dir):
         result = ("NaN " * 15).strip()
 
-        if os.path.exists(T1_first_all_fast_firstseg):
+        if op.exists(T1_first_all_fast_firstseg):
             v=wrappers.fslstats(T1_first_all_fast_firstseg).H(58,0.5,58.5).run()
             # Check that the outputs are OK
             if len(v) == 58:
diff --git a/bip/pipelines/IDPs_gen/IDP_T1_GM_parcellation.py b/bip/pipelines/IDPs_gen/IDP_T1_GM_parcellation.py
index d810c588faa7b18c35ba560688df14ef513ec172..e746cf73b46e0bc9d667f6ac29ce30bc3e03515f 100755
--- a/bip/pipelines/IDPs_gen/IDP_T1_GM_parcellation.py
+++ b/bip/pipelines/IDPs_gen/IDP_T1_GM_parcellation.py
@@ -9,6 +9,7 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -25,10 +26,11 @@ def run(ctx,
         IDP_T1_GM_parcellation:  Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
+
         result = ("NaN " * 139).strip()
 
-        GMatlas_to_T1 = tmp_dir + '/GMatlas_to_T1.nii.gz'
+        GMatlas_to_T1 = op.join(tmp_dir, 'GMatlas_to_T1.nii.gz')
         GMatlas = ctx.get_data("IDPs/GMatlas/GMatlas.nii.gz")
 
         wrappers.applywarp(src=GMatlas, ref=T1, w=T1_to_MNI_warp_coef_inv,
diff --git a/bip/pipelines/IDPs_gen/IDP_T1_SIENAX.py b/bip/pipelines/IDPs_gen/IDP_T1_SIENAX.py
index 02af2ef2a5f09e89d1655d0a99c86fd157da518d..bf275eb27d6ad57be80189fd2ce190546cb3f55e 100755
--- a/bip/pipelines/IDPs_gen/IDP_T1_SIENAX.py
+++ b/bip/pipelines/IDPs_gen/IDP_T1_SIENAX.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 from pipe_tree import In, Out, Ref
@@ -28,7 +29,7 @@ def run(ctx,
 
         result = ("NaN " * 11).strip()
 
-        if os.path.exists(T1_sienax_report):
+        if op.exists(T1_sienax_report):
             with open(T1_sienax_report, "r",  encoding="utf-8") as f:
                 text = f.readlines()
                 # indices that we are interested in
diff --git a/bip/pipelines/IDPs_gen/IDP_T1_align_to_std.py b/bip/pipelines/IDPs_gen/IDP_T1_align_to_std.py
index 92f265dbe07badd13982fbd0cf2e67a1b9902af1..c598f210e92a0a1c3392cef2cd38ae5e44422792 100755
--- a/bip/pipelines/IDPs_gen/IDP_T1_align_to_std.py
+++ b/bip/pipelines/IDPs_gen/IDP_T1_align_to_std.py
@@ -9,6 +9,7 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -26,12 +27,12 @@ def run(ctx,
         IDP_T1_align_to_std:  Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        tmp_jac = tmp_dir + '/tmpjac.nii.gz'
-        tmp_mat = tmp_dir + '/tmp_mat.mat'
+        tmp_jac = op.join(tmp_dir, 'tmpjac.nii.gz')
+        tmp_mat = op.join(tmp_dir, 'tmp_mat.mat')
 
-        MC=ctx.FSLDIR + '/etc/flirtsch/measurecost1.sch'
+        MC = op.join(ctx.FSLDIR, 'etc', 'flirtsch', 'measurecost1.sch')
         MNI152_T1_1mm_brain = ctx.get_standard("MNI152_T1_1mm_brain.nii.gz")
         MNI152_T1_1mm_brain_mask = ctx.get_standard("MNI152_T1_1mm_brain_mask.nii.gz")
 
diff --git a/bip/pipelines/IDPs_gen/IDP_T1_noise_ratio.py b/bip/pipelines/IDPs_gen/IDP_T1_noise_ratio.py
index eb8dc9e64ca33ce02e461dc455fb8c1d86a83222..90b1b6b1ec8cce86f6ef8587f0bb0d03ff3a9c36 100755
--- a/bip/pipelines/IDPs_gen/IDP_T1_noise_ratio.py
+++ b/bip/pipelines/IDPs_gen/IDP_T1_noise_ratio.py
@@ -10,6 +10,7 @@
 # pylint: disable=W0613
 #
 
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -25,9 +26,9 @@ def run(ctx,
         IDP_T1_noise_ratio: Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        tmp_SNR = tmp_dir + '/tmp_SNR.nii.gz'
+        tmp_SNR = op.join(tmp_dir, 'tmp_SNR.nii.gz')
 
         wrappers.fslmaths(T1_fast_pveseg).thr(2).uthr(2).ero().run(tmp_SNR)
         grey  = wrappers.fslstats(T1).k(tmp_SNR).m.run()
diff --git a/bip/pipelines/IDPs_gen/IDP_all_align_to_T1.py b/bip/pipelines/IDPs_gen/IDP_all_align_to_T1.py
index ace1ba26773ca580a6a6c78cd5ba8cc1ef2813ae..dbfac5e3cda68d783157633fdc5e3f949cba1a43 100755
--- a/bip/pipelines/IDPs_gen/IDP_all_align_to_T1.py
+++ b/bip/pipelines/IDPs_gen/IDP_all_align_to_T1.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -30,18 +31,18 @@ def run(ctx,
         IDP_all_align_to_T1:        Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        tmp_mat = tmp_dir + '/tmp_mat.mat'
+        tmp_mat = op.join(tmp_dir, 'tmp_mat.mat')
 
         result=""
 
-        MC=ctx.FSLDIR + '/etc/flirtsch/measurecost1.sch'
+        MC = op.join(ctx.FSLDIR, 'etc', 'flirtsch', 'measurecost1.sch')
 
         for file_name in [T2_FLAIR_brain, fieldmap_iout_to_T1, SWI_to_T1,
                          rfMRI_example_func2highres,tfMRI_example_func2highres]:
 
-            if os.path.exists(file_name):
+            if op.exists(file_name):
                 costs1 = wrappers.flirt(src=file_name, ref=T1_brain,
                                         refweight=T1_brain_mask, schedule=MC,
                                         omat=tmp_mat).stdout[0].strip()
diff --git a/bip/pipelines/IDPs_gen/IDP_diff_TBSS.py b/bip/pipelines/IDPs_gen/IDP_diff_TBSS.py
index c5f60043317792461ff8072ad19524f38bd0b35f..b12d1c2ec2c63c84b3137e56f1cd227edef05ad6 100755
--- a/bip/pipelines/IDPs_gen/IDP_diff_TBSS.py
+++ b/bip/pipelines/IDPs_gen/IDP_diff_TBSS.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from pipe_tree import In, Out, Ref
 from bip.utils.log_utils import redirect_logging, job_name
@@ -33,7 +34,7 @@ def run(ctx,
 
             file_name = JHUrois_prefix + mod + ".txt"
             print(file_name)
-            if os.path.exists(file_name):
+            if op.exists(file_name):
                 with open(file_name, "r", encoding="utf-8") as f:
                     mini_result = f.read()
                     mini_result = mini_result.replace("\n", " ")
diff --git a/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py b/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py
index fece943161cd2957c74932bbd7f65dc5dc04a605..eb2751059a8d7d0b0ae29e60da92aa8d1e3ae348 100755
--- a/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py
+++ b/bip/pipelines/IDPs_gen/IDP_diff_autoPtx.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 import nibabel as nib
 from fsl import wrappers
@@ -29,10 +30,10 @@ def run(ctx,
         IDP_diff_autoPtx:   Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir + "/" + __name__):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        autoPtx_all = tmp_dir + '/autoPtx_all.nii.gz'
-        autoPtx_tmp = tmp_dir + '/autoPtx_tmp.nii.gz'
+        autoPtx_all = op.join(tmp_dir, 'autoPtx_all.nii.gz')
+        autoPtx_tmp = op.join(tmp_dir, 'autoPtx_tmp.nii.gz')
 
         result_NaN = ("NaN " * 27).strip()
         result = ""
@@ -45,7 +46,7 @@ def run(ctx,
             all_file = TBSS_prefix + suffix + ".nii.gz"
             APTX_file = aptx_txt_prefix + suffix + ".txt"
 
-            if os.path.exists(all_file):
+            if op.exists(all_file):
                 wrappers.fslmaths(autoPtx_all).mul(all_file).run(autoPtx_tmp)
                 tractmeas = wrappers.fslstats(autoPtx_tmp, t=True).m.run()
 
diff --git a/bip/pipelines/IDPs_gen/IDP_diff_eddy_outliers.py b/bip/pipelines/IDPs_gen/IDP_diff_eddy_outliers.py
index 6514adbfd733eb4d3a34e8136a03b0f5fbdfd3da..a27c5d7352fd01487cd57fb15ce047ffab6add0a 100755
--- a/bip/pipelines/IDPs_gen/IDP_diff_eddy_outliers.py
+++ b/bip/pipelines/IDPs_gen/IDP_diff_eddy_outliers.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from pipe_tree import In, Out, Ref
 from bip.utils.log_utils import redirect_logging, job_name
@@ -26,7 +27,7 @@ def run(ctx,
 
         num_outliers = 0
 
-        if os.path.exists(eddy_outlier_report):
+        if op.exists(eddy_outlier_report):
             with open(eddy_outlier_report, "r", encoding="utf-8") as f:
                 num_outliers = str(len(f.readlines()))
         else:
diff --git a/bip/pipelines/IDPs_gen/IDP_func_TSNR.py b/bip/pipelines/IDPs_gen/IDP_func_TSNR.py
index 249417dbdfe78bd32abb077f78c4ec66edc2a40d..9c68babe0a713b051f0510feceaf930c7f8be915 100755
--- a/bip/pipelines/IDPs_gen/IDP_func_TSNR.py
+++ b/bip/pipelines/IDPs_gen/IDP_func_TSNR.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
@@ -27,15 +28,15 @@ def run(ctx,
         IDP_func_TSNR:            Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        fMRI_SNR = tmp_dir + '/fMRI_SNR.nii.gz'
+        fMRI_SNR = op.join(tmp_dir, 'fMRI_SNR.nii.gz')
 
         result =""
 
         for file_name in [filtered_func_data, filtered_func_data_clean,
                           tfMRI_filtered_func_data]:
-            if os.path.exists(file_name):
+            if op.exists(file_name):
                 wrappers.fslmaths(file_name).Tstd().run(fMRI_SNR)
                 wrappers.fslmaths(file_name).Tmean().div(fMRI_SNR).run(fMRI_SNR)
                 v = 1 / wrappers.fslstats(fMRI_SNR).l(0.1).p(50).run()
diff --git a/bip/pipelines/IDPs_gen/IDP_func_head_motion.py b/bip/pipelines/IDPs_gen/IDP_func_head_motion.py
index 8bf3a0c4fd827d6b148df4b5340cec67db44d4d3..ec5c137c5f89bbfa002ecc34bb47c947c3dab939 100755
--- a/bip/pipelines/IDPs_gen/IDP_func_head_motion.py
+++ b/bip/pipelines/IDPs_gen/IDP_func_head_motion.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import json
 import logging
 from pipe_tree import In, Out, Ref
@@ -26,13 +27,13 @@ def run(ctx,
 
     with redirect_logging(job_name(run), outdir=logs_dir):
 
-        if os.path.exists(rfMRI_mc_rel_mean):
+        if op.exists(rfMRI_mc_rel_mean):
             with open(rfMRI_mc_rel_mean, "r", encoding="utf-8") as f:
                 val_1 = json.load(f)
         else:
             val_1 = "NaN"
 
-        if os.path.exists(tfMRI_mc_rel_mean):
+        if op.exists(tfMRI_mc_rel_mean):
             with open(tfMRI_mc_rel_mean, "r", encoding="utf-8") as f:
                 val_2 = json.load(f)
         else:
diff --git a/bip/pipelines/IDPs_gen/IDP_func_task_activation.py b/bip/pipelines/IDPs_gen/IDP_func_task_activation.py
index d0263354325f6c4069882ac1b3525d1e34727bc6..c713241c476909a4a10c8b23714d22f5fcefc734 100755
--- a/bip/pipelines/IDPs_gen/IDP_func_task_activation.py
+++ b/bip/pipelines/IDPs_gen/IDP_func_task_activation.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import shutil
 import logging
 from fsl import wrappers
@@ -42,17 +43,19 @@ def run(ctx,
         IDP_func_task_activation:  Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir):
-        if not os.path.exists(highres2standard_warp):
-            os.symlink(src=os.getcwd() + "/" + T1_to_MNI_warp,
-                       dst=highres2standard_warp)
+
+        if not op.exists(highres2standard_warp):
+            rel_path = op.relpath(T1_to_MNI_warp, op.dirname(highres2standard_warp))
+            os.symlink(src=rel_path, dst=highres2standard_warp)
 
-        if not os.path.exists(highres2standard_warp_inv):
-            os.symlink(src=os.getcwd() + "/" + T1_to_MNI_warp_coef_inv,
-                       dst=highres2standard_warp_inv)
+        if not op.exists(highres2standard_warp_inv):
+            rel_path = op.relpath(T1_to_MNI_warp_coef_inv,
+                                  op.dirname(highres2standard_warp_inv))
+            os.symlink(src=rel_path, dst=highres2standard_warp_inv)
 
         for dir_name in [tfMRI_featquery_1_dir, tfMRI_featquery_2_dir,
                          tfMRI_featquery_5_dir, tfMRI_featquery_5a_dir]:
-            if os.path.exists(dir_name):
+            if op.exists(dir_name):
                 shutil.rmtree(dir_name)
 
         group_mask_1  = ctx.get_data("tfMRI_group/groupMask1.nii.gz")
@@ -60,7 +63,7 @@ def run(ctx,
         group_mask_5  = ctx.get_data("tfMRI_group/groupMask5.nii.gz")
         group_mask_5a = ctx.get_data("tfMRI_group/groupMask5a.nii.gz")
 
-        tfMRI_feat += "/"
+        tfMRI_feat += os.sep
 
         N_tfMRI_cope1 = tfMRI_cope1.replace(tfMRI_feat, "")
         N_tfMRI_cope2 = tfMRI_cope2.replace(tfMRI_feat, "")
diff --git a/bip/pipelines/IDPs_gen/IDP_subject_COG_table.py b/bip/pipelines/IDPs_gen/IDP_subject_COG_table.py
index 94ddb79cf2112de177eadad4cd50b93361594893..ef09016ce0c751782d74a55cdcb53f291a19813f 100755
--- a/bip/pipelines/IDPs_gen/IDP_subject_COG_table.py
+++ b/bip/pipelines/IDPs_gen/IDP_subject_COG_table.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from pipe_tree import In, Out, Ref
 from bip.utils.log_utils import redirect_logging, job_name
@@ -39,7 +40,7 @@ def run(ctx,
         for file_name in [T1_dcm_txt, T2_dcm_txt, rfMRI_dcm_txt, SWI_dcm_txt,
                           dMRI_dcm_txt, tfMRI_dcm_txt]:
 
-            if os.path.exists(file_name):
+            if op.exists(file_name):
                 with open(file_name, 'rt', encoding="utf-8") as f:
                     cont = f.readlines()
                     for x in cont:
diff --git a/bip/pipelines/IDPs_gen/IDP_subject_centre.py b/bip/pipelines/IDPs_gen/IDP_subject_centre.py
index 80a077c493d0f600336fcca22401b4cad052662b..c18932409b999891e822b8adafd5d5bfcf97e473 100755
--- a/bip/pipelines/IDPs_gen/IDP_subject_centre.py
+++ b/bip/pipelines/IDPs_gen/IDP_subject_centre.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import json
 import logging
 from pipe_tree import In, Out, Ref
@@ -35,7 +36,7 @@ def run(ctx,
         for file_name in [T1_dcm_txt, T2_dcm_txt, rfMRI_dcm_txt, SWI_dcm_txt,
                           dMRI_dcm_txt, tfMRI_dcm_txt]:
 
-            if os.path.exists(file_name):
+            if op.exists(file_name):
                 with open(file_name, 'rt', encoding="utf-8") as f:
                     cont = f.readlines()
 
diff --git a/bip/pipelines/IDPs_gen/IDPs_generator.py b/bip/pipelines/IDPs_gen/IDPs_generator.py
index 9e7c9ca0050b7ca14ad61ffc9e62556eaf8d0958..a4ba81554d3f6ed4bbb79e724b850b5f46c6afe6 100755
--- a/bip/pipelines/IDPs_gen/IDPs_generator.py
+++ b/bip/pipelines/IDPs_gen/IDPs_generator.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import json
 import logging
 from pipe_tree import In, Out, Ref
@@ -53,11 +54,11 @@ def run(ctx,
                      IDP_func_task_activation, IDP_diff_TBSS, IDP_diff_autoPtx,
                      IDP_subject_COG_table, IDP_subject_centre]:
 
-            plain_name = os.path.basename(IDP_file).replace(".txt", "")
+            plain_name = op.basename(IDP_file).replace(".txt", "")
             num_IDPs = len(IDPs_dict[plain_name])
             result_nans = ("NaN " * num_IDPs).strip()
 
-            if os.path.exists(IDP_file):
+            if op.exists(IDP_file):
                 with open(IDP_file, "r", encoding="utf-8") as f:
                     IDPs_l = f.read().strip().split()
                 if len(IDPs_l) != num_IDPs:
diff --git a/bip/pipelines/dMRI_diff/diff_autoptx.py b/bip/pipelines/dMRI_diff/diff_autoptx.py
index 6b2629bee19917d9a5a6bf00bad267acc873398e..adc53b83c8b122068baa8e8ba94f2d021729743b 100755
--- a/bip/pipelines/dMRI_diff/diff_autoptx.py
+++ b/bip/pipelines/dMRI_diff/diff_autoptx.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import random
 import logging
 from shutil import copyfile
@@ -54,9 +55,9 @@ def run(ctx,
 
         # Does the protocol defines a second
         # run with inverted seed / target masks?
-        if os.path.exists(orig_tract_dir + 'invert'):
+        if op.exists(orig_tract_dir + 'invert'):
             symtrack=True
-            if os.path.exists(waytotal):
+            if op.exists(waytotal):
                 os.remove(waytotal)
         else:
             symtrack=False
@@ -66,7 +67,7 @@ def run(ctx,
         copyfile(orig_tract_dir + 'target.nii.gz',  aptx_target)
         copyfile(orig_tract_dir + 'exclude.nii.gz', aptx_exclude)
 
-        if os.path.exists(waytotal):
+        if op.exists(waytotal):
             os.remove(waytotal)
 
         # Building arguments dictionary for probtrackx
@@ -87,7 +88,7 @@ def run(ctx,
 
         # Is there a stop criterion defined
         # in the protocol for this struct?
-        if os.path.exists(orig_tract_dir + 'stop.nii.gz'):
+        if op.exists(orig_tract_dir + 'stop.nii.gz'):
             kwargs['stop'] = aptx_stop
             copyfile(orig_tract_dir + 'stop.nii.gz', aptx_stop)
 
@@ -115,7 +116,7 @@ def run(ctx,
                 way1 = int(f.read())
             with open(waytotal_inv, 'r', encoding="utf-8") as f:
                 way2 = int(f.read())
-            if os.path.exists(waytotal):
+            if op.exists(waytotal):
                 os.remove(waytotal)
             with open(waytotal, 'wt', encoding="utf-8") as f:
                 way = way1 + way2
diff --git a/bip/pipelines/dMRI_diff/diff_bedpostx.py b/bip/pipelines/dMRI_diff/diff_bedpostx.py
index 8b4aab4b60d3b31a17460361bd2d68e8e781c790..0fabdf158e105304f2ec4da31152e3c8ba6736e6 100755
--- a/bip/pipelines/dMRI_diff/diff_bedpostx.py
+++ b/bip/pipelines/dMRI_diff/diff_bedpostx.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 from fsl import wrappers
@@ -40,14 +41,15 @@ def run(ctx,
 
     with redirect_logging(job_name(run), outdir=logs_dir):
 
-        w = os.getcwd() + "/"
+        w = os.getcwd() + os.sep
 
         # Copy of bvals and bvecs
         copyfile(src=AP_bval, dst=bedpostx_bvals)
         copyfile(src=AP_bvec, dst=bedpostx_bvecs)
         copyfile(src=eddy_nodif_brain_mask, dst=bedpostx_nodif_brain_mask)
 
-        wrappers.fslmaths(eddy_nodif).mas(eddy_nodif_brain_mask).run(bedpostx_nodif_brain)
+        wrappers.fslmaths(eddy_nodif).mas(eddy_nodif_brain_mask).\
+            run(bedpostx_nodif_brain)
 
         nvox=wrappers.fslstats(bedpostx_nodif_brain_mask).V.run()[0]
         with open(bedpostx_nvox, 'wt', encoding="utf-8") as f:
@@ -81,5 +83,5 @@ def run(ctx,
                                        SubjectDir=w + eddy_dir,
                                        TotalNumParts="1", bindir=ctx.FSLDIR)
 
-        if os.path.exists(bedpostx_data_0):
+        if op.exists(bedpostx_data_0):
             os.remove(bedpostx_data_0)
diff --git a/bip/pipelines/dMRI_diff/diff_dtifit.py b/bip/pipelines/dMRI_diff/diff_dtifit.py
index ea4cc46f2611c0a43e9f5b3ba979cd0f24fb9b0c..a6a900e8b8d01745b004aaf3c5b51b4a75da6370 100755
--- a/bip/pipelines/dMRI_diff/diff_dtifit.py
+++ b/bip/pipelines/dMRI_diff/diff_dtifit.py
@@ -10,6 +10,7 @@
 # pylint: disable=W0613
 #
 
+import os.path as op
 import logging
 from shutil import copyfile
 import numpy as np
diff --git a/bip/pipelines/dMRI_diff/diff_eddy.py b/bip/pipelines/dMRI_diff/diff_eddy.py
index a47794e9a85c59414fa8b71ae7c2357f0dd011cf..38d160cbc63bb6b27d30ba7d4a9b95d7c65d8044 100755
--- a/bip/pipelines/dMRI_diff/diff_eddy.py
+++ b/bip/pipelines/dMRI_diff/diff_eddy.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 import nibabel as nib
@@ -47,14 +48,18 @@ def run(ctx,
 
         # Creates links
         # TODO: These links are NOT relative. This may cause future problems.
-        if not os.path.exists(eddy_AP):
-            os.symlink(src="../../../" + AP, dst=eddy_AP)
-        if not os.path.exists(eddy_nodif):
-            os.symlink(src="../../../" + fieldmap_iout_mean, dst=eddy_nodif)
-        if not os.path.exists(eddy_nodif_brain_mask):
-            os.symlink(src="../../../" + fieldmap_mask, dst=eddy_nodif_brain_mask)
-        if not os.path.exists(eddy_nodif_brain_mask_ud):
-            os.symlink(src="../../../" + fieldmap_mask_ud, dst=eddy_nodif_brain_mask_ud)
+        if not op.exists(eddy_AP):
+            rel_path = op.relpath(AP, op.dirname(eddy_AP))
+            os.symlink(src=rel_path, dst=eddy_AP)
+        if not op.exists(eddy_nodif):
+            rel_path = op.relpath(fieldmap_iout_mean, op.dirname(eddy_nodif))
+            os.symlink(src=rel_path, dst=eddy_nodif)
+        if not op.exists(eddy_nodif_brain_mask):
+            rel_path = op.relpath(fieldmap_mask, op.dirname(eddy_nodif_brain_mask))
+            os.symlink(src=rel_path, dst=eddy_nodif_brain_mask)
+        if not op.exists(eddy_nodif_brain_mask_ud):
+            rel_path = op.relpath(fieldmap_mask_ud, op.dirname(eddy_nodif_brain_mask_ud))
+            os.symlink(src=rel_path, dst=eddy_nodif_brain_mask_ud)
 
         copyfile(src=AP_bval, dst=eddy_bvals)
         copyfile(src=AP_bvec, dst=eddy_bvecs)
diff --git a/bip/pipelines/dMRI_diff/diff_noddi.py b/bip/pipelines/dMRI_diff/diff_noddi.py
index 1ef59a233b8526bea52291ef5325d79eb1f7e84b..95e6c5d9cce63d4258d6b985318fc9def12ec5e9 100755
--- a/bip/pipelines/dMRI_diff/diff_noddi.py
+++ b/bip/pipelines/dMRI_diff/diff_noddi.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import move, rmtree
 import zipfile
@@ -39,16 +40,18 @@ def run(ctx,
         NODDI_dir_file:            Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
-        amtmp      = tmp_dir + '/amtmp.nii'
-        amtmp_mask = tmp_dir + '/amtmp_mask.nii'
+         tempdir(op.join(tmp_dir, job_name(run))):
+
+        amtmp      = op.join(tmp_dir, 'amtmp.nii')
+        amtmp_mask = op.join(tmp_dir, 'amtmp_mask.nii')
 
         # In case the kernels were already calculated for this study:
         # TODO: Verify that this is regular UKB data.
         # Otherwise, this should not be done
-        if not os.path.exists('./kernels/'):
-            if os.path.exists(ctx.get_data('dMRI/kernels.zip')):
-                with zipfile.ZipFile(ctx.get_data('dMRI/kernels.zip'), 'r') as zip_ref:
+        if not op.exists('./kernels/'):
+            if op.exists(ctx.get_data('dMRI/kernels.zip')):
+                with zipfile.ZipFile(ctx.get_data('dMRI/kernels.zip'), 
+                                     'r') as zip_ref:
                     zip_ref.extractall("./")
 
         with gzip.open(eddy_data_ud, 'rb') as f_in:
@@ -78,10 +81,10 @@ def run(ctx,
 
         # Move files to proper place
         for file_n in files:
-            old_name = file_n.replace("/NODDI_","/FIT_")
+            old_name = file_n.replace(os.sep + "NODDI_", os.sep + "FIT_")
             move(old_name, file_n)
 
         # Delete created folders
         for dir_to_delete in [AMICO_dir, NODDI_kernels_dir]:
-            if os.path.exists(dir_to_delete):
+            if op.exists(dir_to_delete):
                 rmtree(dir_to_delete)
diff --git a/bip/pipelines/dMRI_diff/diff_tbss.py b/bip/pipelines/dMRI_diff/diff_tbss.py
index db2dc5b6c9810a6cc32f5ba1b2f0fed35e48ed60..4d0c0830d3e60c710e5b9ea3e1b9f7837783d53f 100755
--- a/bip/pipelines/dMRI_diff/diff_tbss.py
+++ b/bip/pipelines/dMRI_diff/diff_tbss.py
@@ -11,9 +11,11 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 import nibabel as nib
+import numpy as np
 from fsl import wrappers
 from bip.utils.log_utils import redirect_logging, tempdir, job_name
 from pipe_tree import In, Out, Ref
@@ -52,30 +54,33 @@ def run(ctx,
         JHUrois_FA:                  Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        TBSS_FA_tmp = tmp_dir + '/TBSS_FA_tmp.nii.gz'
-        TBSS_FA_to_MNI_warp_s1 = tmp_dir + '/FA_to_MNI_warp_s1.nii.gz'
-        TBSS_FA_to_MNI_warp_s2 = tmp_dir + '/FA_to_MNI_warp_s2.nii.gz'
+        TBSS_FA_tmp            = op.join(tmp_dir, 'TBSS_FA_tmp.nii.gz')
+        TBSS_FA_to_MNI_warp_s1 = op.join(tmp_dir, 'FA_to_MNI_warp_s1.nii.gz')
+        TBSS_FA_to_MNI_warp_s2 = op.join(tmp_dir, 'FA_to_MNI_warp_s2.nii.gz')
 
         # Creates links
-        # TODO: Do the cool OS.PATH.RELPATH
-        if not os.path.exists(TBSS_FA):
-            os.symlink(src="../../../" + FA, dst=TBSS_FA)
-        if not os.path.exists(TBSS_MNI):
-            os.symlink(src=ctx.get_standard('FMRIB58_FA_1mm.nii.gz'), dst=TBSS_MNI)
+        if not op.exists(TBSS_FA):
+            rel_path = op.relpath(FA, op.dirname(TBSS_FA))
+            os.symlink(src=rel_path, dst=TBSS_FA)
+        if not op.exists(TBSS_MNI):
+            os.symlink(src=ctx.get_standard('FMRIB58_FA_1mm.nii.gz'),
+                       dst=TBSS_MNI)
         FA_img     = nib.load(TBSS_FA)
         x, y, z = FA_img.header['dim'][1:4]
 
         # erode a little and zero end slices
-        wrappers.fslmaths(FA).min(1).ero().roi(1,x,1,y,1,z,0,1).run(TBSS_FA_dir_FA)
+        wrappers.fslmaths(FA).min(1).ero().roi(1,x,1,y,1,z,0,1).\
+            run(TBSS_FA_dir_FA)
 
         # create mask (for use in FLIRT & FNIRT)
         wrappers.fslmaths(TBSS_FA_dir_FA).bin().run(TBSS_FA_dir_FA_mask)
         #TODO: investigate how to add the -odt=char option
-        wrappers.fslmaths(TBSS_FA_dir_FA_mask).dilD(2).sub(1).abs().add(TBSS_FA_dir_FA_mask).run(TBSS_FA_dir_FA_mask)
+        wrappers.fslmaths(TBSS_FA_dir_FA_mask).dilD(2).sub(1).abs().\
+            add(TBSS_FA_dir_FA_mask).run(TBSS_FA_dir_FA_mask, odt="char")
 
-        if not os.path.exists(TBSS_MNI_to_dti_FA_warp_msf):
+        if not op.exists(TBSS_MNI_to_dti_FA_warp_msf):
             #New Optimal Registration
             wrappers.flirt(ref=TBSS_MNI,
                            src=TBSS_FA_dir_FA,
@@ -105,7 +110,8 @@ def run(ctx,
 
 
             # now estimate the mean deformation
-            wrappers.fslmaths(TBSS_FA_to_MNI_warp).sqr().Tmean().run(TBSS_FA_tmp)
+            wrappers.fslmaths(TBSS_FA_to_MNI_warp).sqr().Tmean().\
+                run(TBSS_FA_tmp)
             mean    = wrappers.fslstats(TBSS_FA_tmp).M.run()
             median  = wrappers.fslstats(TBSS_FA_tmp).P(50).run()
 
@@ -120,16 +126,20 @@ def run(ctx,
 
         # Create mean FA
         skel = ctx.get_standard('FMRIB58_FA-skeleton_1mm.nii.gz')
-        wrappers.fslmaths(TBSS_all_FA).bin().mul(TBSS_MNI).bin().run(TBSS_mean_FA_mask)
+        wrappers.fslmaths(TBSS_all_FA).bin().mul(TBSS_MNI).bin().\
+            run(TBSS_mean_FA_mask)
         wrappers.fslmaths(TBSS_all_FA).mas(TBSS_mean_FA_mask).run(TBSS_all_FA)
         wrappers.fslmaths(TBSS_MNI).mas(TBSS_mean_FA_mask).run(TBSS_mean_FA)
-        wrappers.fslmaths(skel).mas(TBSS_mean_FA_mask).run(TBSS_mean_FA_skeleton)
+        wrappers.fslmaths(skel).mas(TBSS_mean_FA_mask).\
+            run(TBSS_mean_FA_skeleton)
 
         # Create Skeleton mask
         # TODO: This threshold could be configurable by user
         thresh = 2000
-        wrappers.fslmaths(TBSS_mean_FA_skeleton).thr(thresh).bin().run(TBSS_mean_FA_skeleton_mask)
-        wrappers.fslmaths(TBSS_all_FA).mas(TBSS_mean_FA_skeleton_mask).run(TBSS_all_FA_skeletonised)
+        wrappers.fslmaths(TBSS_mean_FA_skeleton).thr(thresh).bin().\
+            run(TBSS_mean_FA_skeleton_mask)
+        wrappers.fslmaths(TBSS_all_FA).mas(TBSS_mean_FA_skeleton_mask).\
+            run(TBSS_all_FA_skeletonised)
 
         atlas = ctx.get_data('/dMRI/TBSS/JHU-ICBM-labels-1mm.nii.gz')
         mean = wrappers.fslstats(TBSS_all_FA_skeletonised, K=atlas).M.run()
@@ -149,14 +159,15 @@ def run(ctx,
 
                 d_input = prefix + suffix + ".nii.gz"
 
-                if os.path.exists(d_input):
+                if op.exists(d_input):
                     d_output      = TBSS_prefix + suffix +".nii.gz"
                     d_output_skel = TBSS_prefix + suffix +"_skeletonised.nii.gz"
                     d_output_txt  = JHUrois_prefix + suffix + '.txt'
                     wrappers.applywarp(src=d_input, out=d_output, ref=TBSS_MNI,
                                        warp=TBSS_FA_to_MNI_warp, rel=True,
                                        interp='trilinear')
-                    wrappers.fslmaths(d_output).mas(TBSS_mean_FA_skeleton_mask).run(d_output_skel)
+                    wrappers.fslmaths(d_output).\
+                        mas(TBSS_mean_FA_skeleton_mask).run(d_output_skel)
                     mean = wrappers.fslstats( d_output_skel, K=atlas).M.run()
 
                     np.savetxt(d_output_txt, mean, fmt="%0.8f")
diff --git a/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py b/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py
index 89631af1fd229709d7e99af8e9357af70919b483..9036ed1d318fed4f899e51dd16ade459f5fe0f6a 100755
--- a/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py
+++ b/bip/pipelines/dMRI_fieldmap/fieldmap_post_topup.py
@@ -9,6 +9,7 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import logging
 from shutil import copyfile
 import numpy as np
@@ -49,13 +50,13 @@ def run(ctx,
         fieldmap_iout_mean_ud_inv_warp: Out):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-               tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        B0_AP_corr_tmp  = tmp_dir + '/B0_AP_corr_tmp.nii.gz'
-        B0_PA_corr_tmp  = tmp_dir + '/B0_PA_corr_tmp.nii.gz'
-        B0_AP_fixed_tmp = tmp_dir + '/B0_AP_fixed_tmp.nii.gz'
-        B0_PA_fixed_tmp = tmp_dir + '/B0_PA_fixed_tmp.nii.gz'
-        fieldmap_tmp    = tmp_dir + '/fieldmap_tmp.nii.gz'
+        B0_AP_corr_tmp  = op.join(tmp_dir, 'B0_AP_corr_tmp.nii.gz')
+        B0_PA_corr_tmp  = op.join(tmp_dir, 'B0_PA_corr_tmp.nii.gz')
+        B0_AP_fixed_tmp = op.join(tmp_dir, 'B0_AP_fixed_tmp.nii.gz')
+        B0_PA_fixed_tmp = op.join(tmp_dir, 'B0_PA_fixed_tmp.nii.gz')
+        fieldmap_tmp    = op.join(tmp_dir, 'fieldmap_tmp.nii.gz')
 
         #Get corrected B=0 for AP and PA and average them avoiding zero values
         wrappers.applytopup(imain=B0_AP, datain=acqparams, index=1,
diff --git a/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py b/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py
index 6dd197045615526fa68e9dbfa326d2eec9dc7b4a..0841b88edc8ac4cd487302f77ba3842e7c3154e8 100755
--- a/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py
+++ b/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup.py
@@ -7,158 +7,20 @@
 # Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
 #
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
-# pylint: disable=W0613,R0912
+# pylint: disable=W0613,R0912,C0123
 #
 
-import glob
+import os.path as op
 import logging
-from shutil import copyfile
-import numpy as np
 import nibabel as nib
 from fsl import wrappers
-from bip.utils.log_utils       import redirect_logging, tempdir, job_name
-from bip.utils.get_b0s         import get_b0s
-from bip.utils.get_dwell_time  import get_dwell_time
-from bip.utils.read_json_field import read_json_field
 from pipe_tree import In, Out, Ref
-
+from bip.utils.log_utils       import redirect_logging, tempdir, job_name
+from bip.pipelines.dMRI_fieldmap.fieldmap_pre_topup_fnc import choose_best_B0
+from bip.pipelines.dMRI_fieldmap.fieldmap_pre_topup_fnc import generate_acqparams
 
 log = logging.getLogger(__name__)
 
-def choose_best_B0(ctx, img, bval, total_B0, indices, tmp, best_index, B0,
-                   fieldmap_flag, tmp_dir):
-
-    blah     = tmp_dir + '/blah.nii.gz'
-    blah_mat = tmp_dir + '/blah.mat'
-
-    b0_threshold=int(np.loadtxt(ctx.get_data('dMRI/b0_threshold.txt')))
-
-    bvals = np.loadtxt(bval)
-
-    if type(bvals) is not np.ndarray:
-        num_vols = 1
-    else:
-        num_vols = len(np.where(bvals <= b0_threshold)[0])
-
-    get_b0s(img, bval, total_B0, indices, num_vols,  b0_threshold)
-    ind_values = [int(x) for x in np.loadtxt(indices)]
-
-    wrappers.fslsplit(total_B0, tmp)
-
-    files = glob.glob(tmp + '*')
-    files.sort()
-
-    N = len(files)
-
-    if N == 1:
-        with open(best_index, 'wt', encoding="utf-8") as f:
-            f.write('0\n')
-        copyfile(src=total_B0, dst=B0)
-    else:
-        scores= np.zeros([N,N])
-        for i in range(N):
-            for j in range(N):
-                if j > i:
-                    wrappers.flirt(src=files[i], ref=files[j], nosearch=True,
-                                   dof=6, out=blah, omat=blah_mat)
-                    im1  = nib.load(blah)
-                    im2  = nib.load(files[j])
-                    im1f = im1.get_fdata().flatten()
-                    im2f = im2.get_fdata().flatten()
-                    corr = np.corrcoef(im1f,im2f)[0,1]
-                    scores[i,j] = corr
-                    scores[j,i] = corr
-
-        # TODO: Do in one line
-        #final_scores = np.zeros(N)
-        #for i in range(N):
-        #    final_scores[i]=np.sum(scores[:,i]) / (N - 1)
-        final_scores = np.sum(scores, axis=1) / (N - 1)
-
-        best_ind = final_scores.argmax()
-        best_score = final_scores[best_ind]
-
-        if final_scores[0] > 0.98:
-            best_ind = 0
-            best_score = final_scores[best_ind]
-        if best_score < 0.95:
-            with open(fieldmap_flag, 'wt', encoding="utf-8") as f:
-                f.write(f'The best score was {best_score}.' +
-                 'The subject will probably have problems with this DWI data\n')
-        with open(best_index, 'wt', encoding="utf-8") as f:
-            f.write(f'{ind_values[best_ind]}\n')
-            copyfile(src=files[best_ind], dst=B0)
-
-
-def generate_acqparams(AP, PA, AP_json, PA_json, numAP, numPA, acqparams):
-
-    enc_dir_AP = read_json_field(fileName=AP_json,
-                                 fieldName="PhaseEncodingDirection")
-    enc_dir_PA = read_json_field(fileName=PA_json,
-                                 fieldName="PhaseEncodingDirection")
-
-    # TODO: Write a way to evaluate that they are indeed opposite directions
-
-    # Generating the specific strings for the acqparams file for AP
-    # according to the Phase Encoding Direction in the AP json file
-    if enc_dir_AP == "j-":
-        dim_AP = 2
-        values_AP = "0 -1 0"
-    elif enc_dir_AP == "j":
-        dim_AP = 2
-        values_AP = "0 1 0"
-    elif enc_dir_AP == "i-":
-        dim_AP = 1
-        values_AP = "-1 0 0"
-    elif enc_dir_AP == "i":
-        dim_AP = 1
-        values_AP = "1 0 0"
-    elif enc_dir_AP == "k-":
-        dim_AP = 3
-        values_AP = "0 0 -1"
-    else:
-        dim_AP = 3
-        values_AP = "0 0 1"
-
-    # Generating the specific strings for the acqparams file for PA
-    # according to the Phase Encoding Direction in the PA json file
-    if enc_dir_PA == "j-":
-        #dim_PA = 2
-        values_PA = "0 -1 0"
-    elif enc_dir_PA == "j":
-        #dim_PA = 2
-        values_PA = "0 1 0"
-    elif enc_dir_PA == "i-":
-        #dim_PA = 1
-        values_PA = "-1 0 0"
-    elif enc_dir_PA == "i":
-        #dim_PA = 1
-        values_PA = "1 0 0"
-    elif enc_dir_PA == "k-":
-        #dim_PA = 3
-        values_PA = "0 0 -1"
-    else:
-        #dim_PA = 3
-        values_PA = "0 0 1"
-
-
-    imAP = nib.load(AP)
-    numlines = imAP.header['dim'][dim_AP]
-
-    dtiDwell = get_dwell_time(AP, AP_json)
-
-    topupValue = (dtiDwell * (numlines -1)) / 1000.0
-
-    values_AP = values_AP + " " + str(topupValue) + "\n"
-    values_PA = values_PA + " " + str(topupValue) + "\n"
-
-    with open(acqparams, 'wt', encoding="utf-8") as f:
-        for i in range(numAP):
-            f.write(values_AP)
-        for i in range(numPA):
-            f.write(values_PA)
-
-
 def run(ctx,
         AP:               In,
         PA:               In,
@@ -182,10 +44,10 @@ def run(ctx,
         tmp_dir:          Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        AP_tmp = tmp_dir + '/AP_tmp_'
-        PA_tmp = tmp_dir + '/PA_tmp_'
+        AP_tmp = op.join(tmp_dir, 'AP_tmp_')
+        PA_tmp = op.join(tmp_dir, 'PA_tmp_')
 
         choose_best_B0(ctx, AP, AP_bval, total_B0_AP, AP_indices, AP_tmp,
                        AP_best_index, B0_AP, AP_fieldmap_flag, tmp_dir)
@@ -204,4 +66,3 @@ def run(ctx,
         if (Zslices % 2) != 0:
             wrappers.fslroi(B0_AP_PA, B0_AP_PA, 0, -1, 0, -1, 0, Zslices -1)
         # TODO: Discuss this with Jesper.
-
diff --git a/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup_fnc.py b/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup_fnc.py
new file mode 100755
index 0000000000000000000000000000000000000000..4f378281435e54537c8710acc50c7c6e46040a0c
--- /dev/null
+++ b/bip/pipelines/dMRI_fieldmap/fieldmap_pre_topup_fnc.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+#
+# fieldmap_pre_topup_fnc.py - Helper functions for the fieldmap processing before topup
+#
+# Author: Fidel Alfaro Almagro <fidel.alfaroalmagro@ndcn.ox.ac.uk>
+# Author: Paul McCarthy <pauldmccarthy@gmail.com>
+# Author: Michiel Cottaar <michiel.cottaar@ndcn.ox.ac.uk>
+#
+# pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
+# pylint: disable=W0613,R0912,W0612
+#
+
+import os.path as op
+import glob
+import logging
+from shutil import copyfile
+import numpy as np
+import nibabel as nib
+from fsl import wrappers
+from bip.utils.get_b0s         import get_b0s
+from bip.utils.get_dwell_time  import get_dwell_time
+from bip.utils.read_json_field import read_json_field
+
+log = logging.getLogger(__name__)
+
+def choose_best_B0(ctx, img, bval, total_B0, indices, tmp, best_index, B0,
+                   fieldmap_flag, tmp_dir):
+
+    blah     = op.join(tmp_dir, 'blah.nii.gz')
+    blah_mat = op.join(tmp_dir, 'blah.mat')
+
+    b0_threshold=int(np.loadtxt(ctx.get_data('dMRI/b0_threshold.txt')))
+
+    bvals = np.loadtxt(bval)
+
+    if type(bvals) is not np.ndarray:
+        num_vols = 1
+    else:
+        num_vols = len(np.where(bvals <= b0_threshold)[0])
+
+    get_b0s(img, bval, total_B0, indices, num_vols,  b0_threshold)
+    ind_values = [int(x) for x in np.loadtxt(indices)]
+
+    wrappers.fslsplit(total_B0, tmp)
+
+    files = glob.glob(tmp + '*')
+    files.sort()
+
+    N = len(files)
+
+    if N == 1:
+        with open(best_index, 'wt', encoding="utf-8") as f:
+            f.write('0\n')
+        copyfile(src=total_B0, dst=B0)
+    else:
+        scores= np.zeros([N,N])
+        for i in range(N):
+            for j in range(N):
+                if j > i:
+                    wrappers.flirt(src=files[i], ref=files[j], nosearch=True,
+                                   dof=6, out=blah, omat=blah_mat)
+                    im1  = nib.load(blah)
+                    im2  = nib.load(files[j])
+                    im1f = im1.get_fdata().flatten()
+                    im2f = im2.get_fdata().flatten()
+                    corr = np.corrcoef(im1f,im2f)[0,1]
+                    scores[i,j] = corr
+                    scores[j,i] = corr
+
+        final_scores = np.sum(scores, axis=1) / (N - 1)
+
+        best_ind = final_scores.argmax()
+        best_score = final_scores[best_ind]
+
+        if final_scores[0] > 0.98:
+            best_ind = 0
+            best_score = final_scores[best_ind]
+        if best_score < 0.95:
+            with open(fieldmap_flag, 'wt', encoding="utf-8") as f:
+                f.write(f'The best score was {best_score}.' +
+                 'The subject will probably have problems with this DWI data\n')
+        with open(best_index, 'wt', encoding="utf-8") as f:
+            f.write(f'{ind_values[best_ind]}\n')
+            copyfile(src=files[best_ind], dst=B0)
+
+
+def generate_acqparams(AP, PA, AP_json, PA_json, numAP, numPA, acqparams):
+
+    enc_dir_AP = read_json_field(fileName=AP_json,
+                                 fieldName="PhaseEncodingDirection")
+    enc_dir_PA = read_json_field(fileName=PA_json,
+                                 fieldName="PhaseEncodingDirection")
+
+    # TODO: Write a way to evaluate that they are indeed opposite directions
+
+    # Generating the specific strings for the acqparams file for AP
+    # according to the Phase Encoding Direction in the AP json file
+    if enc_dir_AP == "j-":
+        dim_AP = 2
+        values_AP = "0 -1 0"
+    elif enc_dir_AP == "j":
+        dim_AP = 2
+        values_AP = "0 1 0"
+    elif enc_dir_AP == "i-":
+        dim_AP = 1
+        values_AP = "-1 0 0"
+    elif enc_dir_AP == "i":
+        dim_AP = 1
+        values_AP = "1 0 0"
+    elif enc_dir_AP == "k-":
+        dim_AP = 3
+        values_AP = "0 0 -1"
+    else:
+        dim_AP = 3
+        values_AP = "0 0 1"
+
+    # Generating the specific strings for the acqparams file for PA
+    # according to the Phase Encoding Direction in the PA json file
+    if enc_dir_PA == "j-":
+        #dim_PA = 2
+        values_PA = "0 -1 0"
+    elif enc_dir_PA == "j":
+        #dim_PA = 2
+        values_PA = "0 1 0"
+    elif enc_dir_PA == "i-":
+        #dim_PA = 1
+        values_PA = "-1 0 0"
+    elif enc_dir_PA == "i":
+        #dim_PA = 1
+        values_PA = "1 0 0"
+    elif enc_dir_PA == "k-":
+        #dim_PA = 3
+        values_PA = "0 0 -1"
+    else:
+        #dim_PA = 3
+        values_PA = "0 0 1"
+
+
+    imAP = nib.load(AP)
+    numlines = imAP.header['dim'][dim_AP]
+
+    dtiDwell = get_dwell_time(AP, AP_json)
+
+    topupValue = (dtiDwell * (numlines -1)) / 1000.0
+
+    values_AP = values_AP + " " + str(topupValue) + "\n"
+    values_PA = values_PA + " " + str(topupValue) + "\n"
+
+    with open(acqparams, 'wt', encoding="utf-8") as f:
+        for i in range(numAP):
+            f.write(values_AP)
+        for i in range(numPA):
+            f.write(values_PA)
diff --git a/bip/pipelines/fMRI_rest/rfMRI_fix.py b/bip/pipelines/fMRI_rest/rfMRI_fix.py
index abe65fc79647dd7b09ae2b5a51d453c6daa1a4f8..781af126f17a56ee3c80bb40d4c38d98d78bf013 100755
--- a/bip/pipelines/fMRI_rest/rfMRI_fix.py
+++ b/bip/pipelines/fMRI_rest/rfMRI_fix.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 from pipe_tree import In, Out, Ref
@@ -42,22 +43,22 @@ def run(ctx,
     with redirect_logging('rfMRI_fix', outdir=logs_dir):
 
         # Generate files needed to extract features
-        if not os.path.exists(rfMRI_highres_pveseg):
+        if not op.exists(rfMRI_highres_pveseg):
             copyfile(T1_fast_pveseg, rfMRI_highres_pveseg)
-        if not os.path.exists(rfMRI_standard2example_func_warp):
+        if not op.exists(rfMRI_standard2example_func_warp):
             wrappers.invwarp(ref=rfMRI_example_func,
                              warp=rfMRI_example_func2standard_warp,
                              out=rfMRI_standard2example_func_warp)
 
         # FIX: Extract features for subject
-        if not os.path.exists(fix_features):
+        if not op.exists(fix_features):
             d = extract.FixData.from_melodic_dir(rfMRI_ica, fixdir = fix_dir)
 
             f = extract.extract_features(data=d,
                                       features=legacy.LEGACY_FEATURE_EXTRACTORS)
-            io.save_features(os.path.join(fix_dir, 'features'), f)
+            io.save_features(op.join(fix_dir, 'features'), f)
         else:
-            f = io.read_features(os.path.join(fix_dir, 'features'))
+            f = io.read_features(op.join(fix_dir, 'features'))
 
         # FIX: Classify components for subject
         clf = io.load_model(ctx.get_data('/fix/UKBB_model_2.pyfix_model'))
@@ -85,7 +86,7 @@ def run(ctx,
 
         # Create this directory in case it does not exist. This may
         # happen due to the overwrite function issue in Melodic
-        if not os.path.exists(rfMRI_reg_standard_dir):
+        if not op.exists(rfMRI_reg_standard_dir):
             os.mkdir(rfMRI_reg_standard_dir)
 
         # Generate the data in standard space
diff --git a/bip/pipelines/fMRI_rest/rfMRI_melodic.py b/bip/pipelines/fMRI_rest/rfMRI_melodic.py
index 8469ae8b22afca718af4d52b5f00beb705105848..33ae209540f1dd0f9137a6e56d067dfa13e52d21 100755
--- a/bip/pipelines/fMRI_rest/rfMRI_melodic.py
+++ b/bip/pipelines/fMRI_rest/rfMRI_melodic.py
@@ -12,6 +12,7 @@
 #
 
 import os
+import os.path as op
 import shutil
 import logging
 from fsl import wrappers
@@ -44,7 +45,7 @@ def run(ctx,
         name_dir_alt[-2] = name_dir_alt[-2] + "+"
         name_dir_alt = ".".join(name_dir_alt)
 
-        if os.path.exists(name_dir_alt):
-            if os.path.exists(rfMRI_ica):
+        if op.exists(name_dir_alt):
+            if op.exists(rfMRI_ica):
                 shutil.rmtree(rfMRI_ica)
             os.rename(name_dir_alt, rfMRI_ica)
diff --git a/bip/pipelines/fMRI_rest/rfMRI_netmats.py b/bip/pipelines/fMRI_rest/rfMRI_netmats.py
index c110d3d82b5fbd22c88be835b83e805668be43c7..651b83ede834c0a201f7c8cd31264c034cfaf5c6 100755
--- a/bip/pipelines/fMRI_rest/rfMRI_netmats.py
+++ b/bip/pipelines/fMRI_rest/rfMRI_netmats.py
@@ -10,6 +10,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref, Var
@@ -39,7 +40,7 @@ def run(ctx,
 
         # Create this directory in case it does not exist. This may
         # happen due to the overwrite function issue in Melodic
-        if not os.path.exists(netmats_dir):
+        if not op.exists(netmats_dir):
             os.mkdir(netmats_dir)
 
 
diff --git a/bip/pipelines/fMRI_rest/rfMRI_netmats_fnc.py b/bip/pipelines/fMRI_rest/rfMRI_netmats_fnc.py
index dcc4d5e325fb213cf6b30f699694ef5fa3570d11..106575f1fb8281cb4fbcdc4581c5292f5701541b 100755
--- a/bip/pipelines/fMRI_rest/rfMRI_netmats_fnc.py
+++ b/bip/pipelines/fMRI_rest/rfMRI_netmats_fnc.py
@@ -10,6 +10,7 @@
 # pylint: disable=E1130
 #
 
+import os.path as op
 import numpy as np
 import nibabel as nib
 
@@ -79,7 +80,7 @@ def nets_netmats(ts, do_rtoz, method, arg_method=None):
 
     elif method.lower() in ['ridgep']:
         grot=np.cov(grot, rowvar=False)
-        grot=grot/np.sqrt(np.nanmean(np.diag(grot) * np.diag(grot)))
+        grot=grot / np.sqrt(np.nanmean(np.diag(grot) * np.diag(grot)))
         if not arg_method:
             rho=0.1
         else:
diff --git a/bip/pipelines/fMRI_rest/rfMRI_prepare.py b/bip/pipelines/fMRI_rest/rfMRI_prepare.py
index 8af1d403bf28f17c91b32d15e2d551ddc796ab29..d469ff45355f761a03ae83294e9a4f638b3632ee 100755
--- a/bip/pipelines/fMRI_rest/rfMRI_prepare.py
+++ b/bip/pipelines/fMRI_rest/rfMRI_prepare.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 import nibabel as nib
@@ -48,20 +49,23 @@ def run(ctx,
 
         # Creates links
         # TODO: These links ARE hard-coded. This may cause future problems.
-        if not os.path.exists(rfMRI_T1):
-            os.symlink(src="../../T1/T1.nii.gz",
-                       dst=rfMRI_T1)
-        if not os.path.exists(rfMRI_T1_brain):
-            os.symlink(src="../../T1/T1_brain.nii.gz",
-                       dst=rfMRI_T1_brain)
-        if not os.path.exists(rfMRI_T1_brain2MNI152_T1_2mm_brain_warp):
-            os.symlink(src="../../T1/transforms/T1_to_MNI_warp.nii.gz",
-                       dst=rfMRI_T1_brain2MNI152_T1_2mm_brain_warp)
-        if not os.path.exists(rfMRI_T1_brain2MNI152_T1_2mm_brain_mat):
-            os.symlink(src="../../T1/transforms/T1_to_MNI_linear.mat",
-                       dst=rfMRI_T1_brain2MNI152_T1_2mm_brain_mat)
-
-        wrappers.fslmaths(T1_fast_pve_2).thr(0.5).bin().run(rfMRI_T1_brain_wmseg)
+        if not op.exists(rfMRI_T1):
+            rel_path = op.relpath(T1, op.dirname(rfMRI_T1))
+            os.symlink(src=rel_path, dst=rfMRI_T1)
+        if not op.exists(rfMRI_T1_brain):
+            rel_path = op.relpath(T1_brain, op.dirname(rfMRI_T1_brain))
+            os.symlink(src=rel_path, dst=rfMRI_T1_brain)
+        if not op.exists(rfMRI_T1_brain2MNI152_T1_2mm_brain_warp):
+            rel_path = op.relpath(T1_to_MNI_warp,
+                op.dirname(rfMRI_T1_brain2MNI152_T1_2mm_brain_warp))
+            os.symlink(src=rel_path, dst=rfMRI_T1_brain2MNI152_T1_2mm_brain_warp)
+        if not op.exists(rfMRI_T1_brain2MNI152_T1_2mm_brain_mat):
+            rel_path = op.relpath(T1_to_MNI_linear_mat,
+                op.dirname(rfMRI_T1_brain2MNI152_T1_2mm_brain_mat))
+            os.symlink(src=rel_path, dst=rfMRI_T1_brain2MNI152_T1_2mm_brain_mat)
+
+        wrappers.fslmaths(T1_fast_pve_2).thr(0.5).bin().\
+            run(rfMRI_T1_brain_wmseg)
 
         # Generation of FSF file
         copyfile(src=ctx.get_data('fMRI_fsf/design_ICA_nonSmoothed.fsf'),
@@ -98,16 +102,31 @@ def run(ctx,
             copyfile(src=rfMRI_SBREF, dst=rfMRI_SBREF_ud)
 
         with open(rfMRI_fsf, 'a', encoding="utf-8") as f:
-            f.write('set fmri(regstandard) "' + ctx.FSLDIR + \
-                    '/data/standard/MNI152_T1_2mm_brain"\n')
-            f.write('set fmri(outputdir) "' + os.getcwd() + "/" + rfMRI_ica + '"\n')
-            f.write('set feat_files(1) "' + os.getcwd() + "/" + rfMRI + '"\n')
-            f.write('set alt_ex_func(1) "' + os.getcwd() + "/" + rfMRI_SBREF + '"\n')
-            f.write('set unwarp_files(1) "' + os.getcwd() + "/" + fieldmap_fout_to_T1_brain_rad + '"\n')
-            f.write('set unwarp_files_mag(1) "' + os.getcwd() + "/" + rfMRI_T1_brain + '"\n')
-            f.write('set highres_files(1) "' + os.getcwd() + "/" + rfMRI_T1_brain + '"\n')
+            file_name = ctx.get_standard("MNI152_T1_2mm_brain")
+            f.write('set fmri(regstandard) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), rfMRI_ica)
+            f.write('set fmri(outputdir) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), rfMRI)
+            f.write('set feat_files(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), rfMRI_SBREF)
+            f.write('set alt_ex_func(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), fieldmap_fout_to_T1_brain_rad)
+            f.write('set unwarp_files(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), rfMRI_T1_brain)
+            f.write('set unwarp_files_mag(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), rfMRI_T1_brain)
+            f.write('set highres_files(1) "' + file_name + '"\n')
+
             if ctx.gdc != '':
-                f.write('set fmri(gdc) "' + os.getcwd() + "/" + rfMRI_SBREF_ud_warp + '"\n')
+                file_name = op.join(os.getcwd(), rfMRI_SBREF_ud_warp)
+                f.write('set fmri(gdc) "' + file_name + '"\n')
+
             f.write('set fmri(tr) ' + str(fmriTR) + '\n')
             f.write('set fmri(npts) ' + str(fmriNumVol) + '\n')
             f.write('set fmri(dwell) ' + str(fmriDwell) + '\n')
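
Note on the symlink blocks in rfMRI_prepare.py (the same pattern appears in tfMRI_prepare.py, T1_first.py and asl_proc.py): os.path.relpath() treats its second argument as a directory, so the start point has to be the directory that will contain the link rather than the link path itself, otherwise the computed source gains one spurious '..'. A minimal sketch of the pattern, with an illustrative helper name (relative_symlink) and illustrative paths rather than the pipeline's real layout:

    import os
    import os.path as op

    def relative_symlink(target, link):
        # Compute the target relative to the directory holding the link,
        # then create the link only if it does not already exist.
        rel_path = op.relpath(target, op.dirname(link))
        if not op.exists(link):
            os.symlink(src=rel_path, dst=link)

    # For a layout like subj/fMRI/rfMRI.ica/T1.nii.gz -> subj/T1/T1.nii.gz
    # this yields '../../T1/T1.nii.gz', matching the previously hard-coded
    # value that these blocks replace.
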
diff --git a/bip/pipelines/fMRI_task/tfMRI_feat.py b/bip/pipelines/fMRI_task/tfMRI_feat.py
index 6b275525175a3f2a64c06595ddccd59ffa3a75c2..318a710827a031d7a8b5b4f5979556d5b8aec119 100755
--- a/bip/pipelines/fMRI_task/tfMRI_feat.py
+++ b/bip/pipelines/fMRI_task/tfMRI_feat.py
@@ -10,6 +10,9 @@
 # pylint: disable=W0613
 #
 
+import os
+import os.path as op
+import shutil
 import logging
 from fsl import wrappers
 from bip.utils.log_utils import redirect_logging
@@ -47,7 +50,7 @@ def run(ctx,
         name_dir_alt[-2] = name_dir_alt[-2] + "+"
         name_dir_alt = ".".join(name_dir_alt)
 
-        if os.path.exists(name_dir_alt):
-            if os.path.exists(tfMRI_feat):
+        if op.exists(name_dir_alt):
+            if op.exists(tfMRI_feat):
                 shutil.rmtree(tfMRI_feat)
             os.rename(name_dir_alt, tfMRI_feat)
diff --git a/bip/pipelines/fMRI_task/tfMRI_prepare.py b/bip/pipelines/fMRI_task/tfMRI_prepare.py
index 06bc292f207f208142074a436bb2c35fd8fcbeb4..c309b45ea5513239d50f2d0236f3cdd1ba16f1ee 100755
--- a/bip/pipelines/fMRI_task/tfMRI_prepare.py
+++ b/bip/pipelines/fMRI_task/tfMRI_prepare.py
@@ -11,6 +11,7 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 import nibabel as nib
@@ -48,20 +49,23 @@ def run(ctx,
 
         # Creates links
         # TODO: These links ARE hard-coded. This may cause future problems.
-        if not os.path.exists(tfMRI_T1):
-            os.symlink(src="../../T1/T1.nii.gz",
-                       dst=tfMRI_T1)
-        if not os.path.exists(tfMRI_T1_brain):
-            os.symlink(src="../../T1/T1_brain.nii.gz",
-                       dst=tfMRI_T1_brain)
-        if not os.path.exists(tfMRI_T1_brain2MNI152_T1_2mm_brain_warp):
-            os.symlink(src="../../T1/transforms/T1_to_MNI_warp.nii.gz",
-                       dst=tfMRI_T1_brain2MNI152_T1_2mm_brain_warp)
-        if not os.path.exists(tfMRI_T1_brain2MNI152_T1_2mm_brain_mat):
-            os.symlink(src="../../T1/transforms/T1_to_MNI_linear.mat",
-                       dst=tfMRI_T1_brain2MNI152_T1_2mm_brain_mat)
-
-        wrappers.fslmaths(T1_fast_pve_2).thr(0.5).bin().run(tfMRI_T1_brain_wmseg)
+        if not op.exists(tfMRI_T1):
+            rel_path = op.relpath(T1, op.dirname(tfMRI_T1))
+            os.symlink(src=rel_path, dst=tfMRI_T1)
+        if not op.exists(tfMRI_T1_brain):
+            rel_path = op.relpath(T1_brain, op.dirname(tfMRI_T1_brain))
+            os.symlink(src=rel_path, dst=tfMRI_T1_brain)
+        if not op.exists(tfMRI_T1_brain2MNI152_T1_2mm_brain_warp):
+            link_dir = op.dirname(tfMRI_T1_brain2MNI152_T1_2mm_brain_warp)
+            rel_path = op.relpath(T1_to_MNI_warp, link_dir)
+            os.symlink(src=rel_path, dst=tfMRI_T1_brain2MNI152_T1_2mm_brain_warp)
+        if not op.exists(tfMRI_T1_brain2MNI152_T1_2mm_brain_mat):
+            link_dir = op.dirname(tfMRI_T1_brain2MNI152_T1_2mm_brain_mat)
+            rel_path = op.relpath(T1_to_MNI_linear_mat, link_dir)
+            os.symlink(src=rel_path, dst=tfMRI_T1_brain2MNI152_T1_2mm_brain_mat)
+
+        wrappers.fslmaths(T1_fast_pve_2).thr(0.5).bin().\
+            run(tfMRI_T1_brain_wmseg)
 
         # Generation of FSF file
         copyfile(src=ctx.get_data('fMRI_fsf/design_TASK.fsf'), dst=tfMRI_fsf)
@@ -97,18 +101,37 @@ def run(ctx,
             copyfile(src=tfMRI_SBREF, dst=tfMRI_SBREF_ud)
 
         with open(tfMRI_fsf, 'a', encoding="utf-8") as f:
-            f.write('set fmri(regstandard) "' + ctx.FSLDIR + \
-                    '/data/standard/MNI152_T1_2mm_brain"\n')
-            f.write('set fmri(custom1) "' + ctx.get_data('fMRI_fsf/designS.txt') + '"\n')
-            f.write('set fmri(custom2) "' + ctx.get_data('fMRI_fsf/designF.txt') + '"\n')
-            f.write('set fmri(outputdir) "' + os.getcwd() + "/" + tfMRI_feat + '"\n')
-            f.write('set feat_files(1) "' + os.getcwd() + "/" + tfMRI + '"\n')
-            f.write('set alt_ex_func(1) "' + os.getcwd() + "/" + tfMRI_SBREF + '"\n')
-            f.write('set unwarp_files(1) "' + os.getcwd() + "/" + fieldmap_fout_to_T1_brain_rad + '"\n')
-            f.write('set unwarp_files_mag(1) "' + os.getcwd() + "/" + tfMRI_T1_brain + '"\n')
-            f.write('set highres_files(1) "' + os.getcwd() + "/" + tfMRI_T1_brain + '"\n')
+            file_name = ctx.get_standard("MNI152_T1_2mm_brain")
+            f.write('set fmri(regstandard) "' + file_name + '"\n')
+
+            file_name = ctx.get_data('fMRI_fsf/designS.txt')
+            f.write('set fmri(custom1) "' + file_name + '"\n')
+
+            file_name = ctx.get_data('fMRI_fsf/designF.txt')
+            f.write('set fmri(custom2) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), tfMRI_feat)
+            f.write('set fmri(outputdir) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), tfMRI)
+            f.write('set feat_files(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), tfMRI_SBREF)
+            f.write('set alt_ex_func(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), fieldmap_fout_to_T1_brain_rad)
+            f.write('set unwarp_files(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), tfMRI_T1_brain)
+            f.write('set unwarp_files_mag(1) "' + file_name + '"\n')
+
+            file_name = op.join(os.getcwd(), tfMRI_T1_brain)
+            f.write('set highres_files(1) "' + file_name + '"\n')
+
             if ctx.gdc != '':
-                f.write('set fmri(gdc) "' + os.getcwd() + "/" + tfMRI_SBREF_ud_warp + '"\n')
+                file_name = op.join(os.getcwd(), tfMRI_SBREF_ud_warp)
+                f.write('set fmri(gdc) "' + file_name + '"\n')
+
             f.write('set fmri(tr) ' + str(fmriTR) + '\n')
             f.write('set fmri(npts) ' + str(fmriNumVol) + '\n')
             f.write('set fmri(dwell) ' + str(fmriDwell) + '\n')
diff --git a/bip/pipelines/struct_FS/FS_get_IDPs.py b/bip/pipelines/struct_FS/FS_get_IDPs.py
index 3dee9ab76100572fc21b351892586b1b76247682..f1f37129deca8983ad7a4c1902ffc27186cbdef6 100755
--- a/bip/pipelines/struct_FS/FS_get_IDPs.py
+++ b/bip/pipelines/struct_FS/FS_get_IDPs.py
@@ -11,9 +11,8 @@
 #
 
 import os
-import sys
+import os.path as op
 import logging
-import argparse
 from pipe_tree import In, Out, Ref
 from bip.pipelines.struct_FS.FS_get_IDPs_fnc import bb_FS_get_IDPs
 from bip.utils.log_utils import redirect_logging, job_name
@@ -28,6 +27,6 @@ def run(ctx,
 
     with redirect_logging(job_name(run), outdir=logs_dir):
 
-        env = dict(os.environ, SUBJECTS_DIR=os.getcwd() + "/" + ctx.subject)
+        env = dict(os.environ, SUBJECTS_DIR=op.join(os.getcwd(), ctx.subject))
 
         bb_FS_get_IDPs(ctx, env)
diff --git a/bip/pipelines/struct_FS/FS_get_IDPs_fnc.py b/bip/pipelines/struct_FS/FS_get_IDPs_fnc.py
index 3a14ccf4e66f4078c34c6066b3c94741aa8741c8..44fef49b535ea503a9676a75ba515fe14cd8b42e 100755
--- a/bip/pipelines/struct_FS/FS_get_IDPs_fnc.py
+++ b/bip/pipelines/struct_FS/FS_get_IDPs_fnc.py
@@ -11,24 +11,12 @@
 #
 
 import os
-import sys
+import os.path as op
 import logging
-import argparse
-from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging,  run_command
+from bip.utils.log_utils import run_command
 
 log = logging.getLogger(__name__)
 
-class MyParser(argparse.ArgumentParser):
-    def error(self, message):
-        sys.stderr.write(f'error: {message}\n')
-        self.print_help()
-        sys.exit(2)
-
-class Usage(Exception):
-    def __init__(self, msg):
-        self.msg = msg
-
 def check_and_create_dir(dirName):
     try:
         os.stat(dirName)
@@ -47,49 +35,51 @@ def read_file(fileName):
 def generate_FS_IDP_files(SUBJECTS_DIR, subject_ID, subject, dataDir,
                           headersDir, ctx, env):
 
-    statsDir = SUBJECTS_DIR + '/' + subject_ID + '/stats/'
+    statsDir = op.join(SUBJECTS_DIR, subject_ID, 'stats') + os.sep
 
     #TODO: Include a pre-requisite that python2.7 must be availble in the system
-    if os.path.isfile(statsDir + 'aseg.stats'):
+    if op.isfile(statsDir + 'aseg.stats'):
         run_command(log, 'asegstats2table  '+
            ' -m volume --all-segs --tablefile ' + dataDir + 'aseg_1.txt ' +
-           ' --subjects ' + subject_ID + ' --skip', env)
+           ' --subjects ' + subject_ID + ' --skip', env=env)
         run_command(log, 'asegstats2table  '+
             ' -m mean --all-segs --tablefile ' + dataDir +
-            'aseg_intensity.txt ' +' --subjects ' + subject_ID + ' --skip', env)
+            'aseg_intensity.txt ' + ' --subjects ' + subject_ID + ' --skip',
+            env=env)
 
-    if os.path.isfile(statsDir + 'lh.w-g.pct.stats'):
+    if op.isfile(statsDir + 'lh.w-g.pct.stats'):
         run_command(log, 'asegstats2table ' +
             ' -m mean --all-segs --stats=lh.w-g.pct.stats ' +
             ' --tablefile ' + dataDir + 'wg_lh_mean.txt ' +
-            ' --subjects ' + subject_ID + ' --skip', env)
+            ' --subjects ' + subject_ID + ' --skip', env=env)
 
-    if os.path.isfile(statsDir + 'rh.w-g.pct.stats'):
+    if op.isfile(statsDir + 'rh.w-g.pct.stats'):
         run_command(log, 'asegstats2table ' +
             ' -m mean --all-segs --stats=rh.w-g.pct.stats ' +
             ' --tablefile ' + dataDir + 'wg_rh_mean.txt ' +
-            ' --subjects ' + subject_ID + ' --skip', env)
+            ' --subjects ' + subject_ID + ' --skip', env=env)
 
     for hemi in ["lh", "rh"]:
         for value in ["volume", "area", "thickness"]:
             for atlas in ["BA_exvivo", "aparc.DKTatlas", "aparc.a2009s",
                           "aparc"]:
                 outFileName = dataDir + atlas + '_' + hemi + '_' + value + '.txt'
-                if os.path.isfile(statsDir + hemi + "." + atlas + '.stats'):
+                if op.isfile(statsDir + hemi + "." + atlas + '.stats'):
                     run_command(log, 'aparcstats2table '+
                         ' -m ' + value + ' --hemi=' + hemi +
                         ' --tablefile ' + outFileName +
-                        ' --subjects ' + subject_ID + ' --skip -p ' + atlas,env)
+                        ' --subjects ' + subject_ID + ' --skip -p ' + atlas,
+                        env=env)
 
     atlas="aparc.pial"
     value="area"
     for hemi in ["lh", "rh"]:
         outFileName = dataDir + atlas + '_' + hemi + '_' + value + '.txt'
-        if os.path.isfile(statsDir + hemi + '.aparc.pial.stats'):
+        if op.isfile(statsDir + hemi + '.aparc.pial.stats'):
             run_command(log, 'aparcstats2table '+
                 ' -m ' + value + ' --hemi=' + hemi +
                 ' --tablefile ' + outFileName +
-                ' --subjects ' + subject_ID + ' --skip -p ' + atlas, env)
+                ' --subjects ' + subject_ID + ' --skip -p ' + atlas, env=env)
 
     with open(ctx.get_data('FS/FS_initial_files.txt'), encoding="utf-8") as f:
         files_generated = [x.replace('\n','').split(" ") for x in f.readlines()]
@@ -97,7 +87,7 @@ def generate_FS_IDP_files(SUBJECTS_DIR, subject_ID, subject, dataDir,
     data_dict={}
 
     for file_generated in files_generated:
-        if os.path.isfile(dataDir + file_generated[0] + '.txt'):
+        if op.isfile(dataDir + file_generated[0] + '.txt'):
             data = read_file(dataDir + file_generated[0] + '.txt')
         else:
             data = read_file(ctx.get_data('FS/FS_data_ex/' +
@@ -114,17 +104,17 @@ def check_consistency(data_dict, SUBJECTS_DIR, ctx):
 
     for file_generated in data_dict.keys():
         if len(data_dict[file_generated])>2:
-            print("Error in " + file_generated + ': File has more than 2 lines')
             save_data_NaNs(SUBJECTS_DIR, ctx)
-            sys.exit(-1)
+            raise Exception("Error in " + file_generated + \
+                            ': File has more than 2 lines')
 
         len0=len(data_dict[file_generated][0])
         len1=len(data_dict[file_generated][1])
 
         if len0 != len1:
-            print("Error in " + file_generated + ': Inconsistent # of features')
             save_data_NaNs(SUBJECTS_DIR, ctx)
-            sys.exit(-1)
+            raise Exception("Error in " + file_generated + \
+                            ': Inconsistent # of features')
 
 
 def fix_aseg_data(data_dict, subjectDir):
@@ -148,7 +138,8 @@ def fix_aseg_data(data_dict, subjectDir):
 
     # For some reason, the VentricleChoroidVol is not caught by asegstats2table
     try:
-        with open(subjectDir + '/stats/aseg.stats', 'r', encoding="utf-8") as f:
+        file_name = op.join(subjectDir, 'stats', 'aseg.stats')
+        with open(file_name, 'r', encoding="utf-8") as f:
             val=[x.split(',')[3].strip() for x in f.readlines() if 'VentricleChoroidVol' in x]
     except:
         val=["NaN"]
@@ -207,7 +198,7 @@ def gen_aparc_special(data_dict, subjectDir):
 
 
 def bool_FLAIR(data_dict, subjectDir):
-    if os.path.isfile(subjectDir + '/mri/FLAIR.mgz'):
+    if op.isfile(subjectDir + '/mri/FLAIR.mgz'):
         data_dict['FLAIR'] = [['Use-T2-FLAIR-for-FreeSurfer'],['1']]
     else:
         data_dict['FLAIR'] = [['Use-T2-FLAIR-for-FreeSurfer'],['0']]
@@ -233,7 +224,7 @@ def gen_subsegmentation(data_dict, subjectDir, subject, ctx):
         data_dict[struct] = [[],[]]
         for fil in struct_data[struct][0]:
             final_fil = subjectDir + '/FreeSurfer/mri/' + fil
-            if os.path.isfile(final_fil):
+            if op.isfile(final_fil):
                 with open(final_fil, 'r',encoding="utf-8") as f:
                     for lin in f.readlines():
                         lin = lin.replace('\n','').split(' ')
@@ -518,9 +509,8 @@ def bb_FS_get_IDPs(ctx, env):
     headersDir = subjectDir + '/headers/'
 
     #TODO: Raise an exception
-    if not os.path.isdir(subjectDir):
-        print("Error: FreeSurfer has not been run on this subject")
-        sys.exit(-1)
+    if not op.isdir(subjectDir):
+        raise Exception("Error: FreeSurfer has not been run on this subject")
 
     check_and_create_dir(dataDir)
     check_and_create_dir(headersDir)
@@ -537,16 +527,3 @@ def bb_FS_get_IDPs(ctx, env):
 
     check_consistency(data_dict, subjectDir, ctx)
     save_data(data_dict, subjectDir, ctx)
-
-
-def run(ctx,
-        ThalamicNuclei:             In,
-        rh_entorhinal_exvivo_label: In,
-        logs_dir:                   Ref,
-        FS_IDPs:                    Out):
-
-    with redirect_logging('FS_get_IDPs', outdir=logs_dir):
-
-        env = dict(os.environ, SUBJECTS_DIR=os.getcwd() + "/" + ctx.subject)
-
-        bb_FS_get_IDPs(ctx, env)
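
Note: replacing the print()/sys.exit(-1) pairs in FS_get_IDPs_fnc.py with raised exceptions means failures now propagate to whatever invokes bb_FS_get_IDPs() instead of killing the interpreter; check_consistency() still writes NaN placeholders via save_data_NaNs() before raising. A minimal sketch of how a caller could react (the wrapper below, get_IDPs_or_log, is illustrative only and not part of this change):

    import logging
    from bip.pipelines.struct_FS.FS_get_IDPs_fnc import bb_FS_get_IDPs

    log = logging.getLogger(__name__)

    def get_IDPs_or_log(ctx, env):
        # Hypothetical wrapper: record the failure and re-raise so the
        # pipeline scheduler can mark the job as failed.
        try:
            bb_FS_get_IDPs(ctx, env)
        except Exception as exc:
            log.error("FreeSurfer IDP extraction failed: %s", exc)
            raise
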
diff --git a/bip/pipelines/struct_FS/FS_proc.py b/bip/pipelines/struct_FS/FS_proc.py
index 7e3f834b4ea26bccf3a485328ff1e0a55641ff25..3b4f14e922c2792bcd884ae309249e72ccf1dc84 100755
--- a/bip/pipelines/struct_FS/FS_proc.py
+++ b/bip/pipelines/struct_FS/FS_proc.py
@@ -11,10 +11,11 @@
 #
 
 import os
+import os.path as op
 import shutil
 import logging
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging, run_command
+from bip.utils.log_utils import redirect_logging, run_command, job_name
 
 log = logging.getLogger(__name__)
 
@@ -29,24 +30,24 @@ def run(ctx,
     with redirect_logging(job_name(run), outdir=logs_dir):
 
         # We need to delete the folder because otherwise, FreeSurfer complains
-        if os.path.exists(FreeSurfer_dir):
+        if op.exists(FreeSurfer_dir):
             shutil.rmtree(FreeSurfer_dir)
 
         #TODO: SUBJECTS_DIR can be an argument of freesurfer instead of this craps
-        env = dict(os.environ, SUBJECTS_DIR=os.getcwd() + "/" + ctx.subject)
+        env = dict(os.environ, SUBJECTS_DIR=op.join(os.getcwd(), ctx.subject))
 
-        T1_path = os.getcwd() + "/" + T1_unbiased
-        T2_path = os.getcwd() + "/" + T2_FLAIR_unbiased
+        T1_path = op.join(os.getcwd(), T1_unbiased)
+        T2_path = op.join(os.getcwd(), T2_FLAIR_unbiased)
 
         cmd = 'recon-all -all -s FreeSurfer -i ' + T1_path
 
         #TODO: Do this for all cases of os.path.exits (all optional things)
-        if T2_FLAIR_unbiased is not None and os.path.exists(T2_FLAIR_unbiased):
+        if T2_FLAIR_unbiased is not None and op.exists(T2_FLAIR_unbiased):
             cmd += " -FLAIR " + T2_path + " -FLAIRpial"
 
         try:
             output = run_command(log, cmd, env=env)      
             log.info(output)
         finally:
-            if os.path.exists(fsaverage):
+            if op.exists(fsaverage):
                 os.unlink(fsaverage)
diff --git a/bip/pipelines/struct_FS/FS_segm.py b/bip/pipelines/struct_FS/FS_segm.py
index 0e646224685d391394ca4f0749992b9200356673..7f7fde392eb816754078a9adbba8e184f4b6ebb1 100755
--- a/bip/pipelines/struct_FS/FS_segm.py
+++ b/bip/pipelines/struct_FS/FS_segm.py
@@ -11,9 +11,10 @@
 #
 
 import os
+import os.path as op
 import logging
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging, run_command
+from bip.utils.log_utils import redirect_logging, run_command, job_name
 
 log = logging.getLogger(__name__)
 
@@ -27,18 +28,18 @@ def run(ctx,
 
     with redirect_logging(job_name(run), outdir=logs_dir):
 
-        env = dict(os.environ, SUBJECTS_DIR=os.getcwd() + "/" + ctx.subject)
+        env = dict(os.environ, SUBJECTS_DIR=op.join(os.getcwd(), ctx.subject))
 
-        output1 = run_command(log, 'segmentThalamicNuclei.sh FreeSurfer', env)
+        output1 = run_command(log, 'segmentThalamicNuclei.sh FreeSurfer', env=env)
         log.info(output1)
 
-        output2 = run_command(log, 'segmentBS.sh FreeSurfer', env)
+        output2 = run_command(log, 'segmentBS.sh FreeSurfer', env=env)
         log.info(output2)
 
-        if os.path.exists(T2_FLAIR_unbiased):
+        if op.exists(T2_FLAIR_unbiased):
             output3 = run_command(log,
                      'segmentHA_T2.sh FreeSurfer ' + FreSurfer_FLAIR + '  AN 1',
-                      env)
+                      env=env)
         else:
-            output3 = run_command(log, 'segmentHA_T1.sh FreeSurfer', env)
+            output3 = run_command(log, 'segmentHA_T1.sh FreeSurfer', env=env)
         log.info(output3)
diff --git a/bip/pipelines/struct_T1/T1_QC_CNR_corners.py b/bip/pipelines/struct_T1/T1_QC_CNR_corners.py
index dd3d960c1f8c464f42f9534c57fc54bfad7ba486..40c686e20e91b6cb42cd3bb2dc7e416fba888352 100755
--- a/bip/pipelines/struct_T1/T1_QC_CNR_corners.py
+++ b/bip/pipelines/struct_T1/T1_QC_CNR_corners.py
@@ -10,10 +10,11 @@
 # pylint: disable=W0613
 #
 
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 
 log = logging.getLogger(__name__)
 
@@ -31,16 +32,16 @@ def run(ctx,
         tmp_dir:                 Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
-
-        roi_1_1     = tmp_dir + '/roi_1_1.nii.gz'
-        roi_1_245   = tmp_dir + '/roi_1_245.nii.gz'
-        roi_197_1   = tmp_dir + '/roi_197_1.nii.gz'
-        roi_197_245 = tmp_dir + '/roi_197_245.nii.gz'
-        roi_x_1     = tmp_dir + '/roi_x_1.nii.gz'
-        roi_x_245   = tmp_dir + '/roi_x_245.nii.gz'
-        roi_lower   = tmp_dir + '/roi_lower.nii.gz'
-        roi_upper   = tmp_dir + '/roi_upper.nii.gz'
+         tempdir(op.join(tmp_dir, job_name(run))):
+
+        roi_1_1     = op.join(tmp_dir, 'roi_1_1.nii.gz')
+        roi_1_245   = op.join(tmp_dir, 'roi_1_245.nii.gz')
+        roi_197_1   = op.join(tmp_dir, 'roi_197_1.nii.gz')
+        roi_197_245 = op.join(tmp_dir, 'roi_197_245.nii.gz')
+        roi_x_1     = op.join(tmp_dir, 'roi_x_1.nii.gz')
+        roi_x_245   = op.join(tmp_dir, 'roi_x_245.nii.gz')
+        roi_lower   = op.join(tmp_dir, 'roi_lower.nii.gz')
+        roi_upper   = op.join(tmp_dir, 'roi_upper.nii.gz')
 
 
         in_files   = [T1_orig, T1_notNorm]
diff --git a/bip/pipelines/struct_T1/T1_QC_CNR_eyes.py b/bip/pipelines/struct_T1/T1_QC_CNR_eyes.py
index fac3844700b7552653e4761bc4592de8421802af..3637366a7b3fecc7395fecb9ffcf15ef51533996 100755
--- a/bip/pipelines/struct_T1/T1_QC_CNR_eyes.py
+++ b/bip/pipelines/struct_T1/T1_QC_CNR_eyes.py
@@ -10,13 +10,13 @@
 # pylint: disable=W0613
 #
 
-
 import os
+import os.path as op
 import logging
 import nibabel as nib
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 
 log = logging.getLogger(__name__)
 
@@ -41,15 +41,15 @@ def run(ctx,
         tmp_dir:               Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-        tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        T1_brain_GM_mask_orig = tmp_dir + '/T1_brain_GM_mask_orig.nii.gz'
-        T1_brain_WM_mask_orig = tmp_dir + '/T1_brain_WM_mask_orig.nii.gz'
-        lmask                 = tmp_dir + '/lmask.nii.gz'
-        rmask                 = tmp_dir + '/rmask.nii.gz'
-        T1_tmp_7              = tmp_dir + '/T1_tmp_7.nii.gz'
+        T1_brain_GM_mask_orig = op.join(tmp_dir, 'T1_brain_GM_mask_orig.nii.gz')
+        T1_brain_WM_mask_orig = op.join(tmp_dir, 'T1_brain_WM_mask_orig.nii.gz')
+        lmask                 = op.join(tmp_dir, 'lmask.nii.gz')
+        rmask                 = op.join(tmp_dir, 'rmask.nii.gz')
+        T1_tmp_7              = op.join(tmp_dir, 'T1_tmp_7.nii.gz')
 
-        if not os.path.exists(T1_orig_ud_warp_inv):
+        if not op.exists(T1_orig_ud_warp_inv):
             wrappers.invwarp(ref=T1_orig, warp=T1_orig_ud_warp,
                              out=T1_orig_ud_warp_inv)
 
diff --git a/bip/pipelines/struct_T1/T1_QC_COG.py b/bip/pipelines/struct_T1/T1_QC_COG.py
index 7236b28349407422b7780455aea77591b9e6a6fe..89d82fe63a2c73718eb84a319421108fdc63eb4d 100755
--- a/bip/pipelines/struct_T1/T1_QC_COG.py
+++ b/bip/pipelines/struct_T1/T1_QC_COG.py
@@ -9,11 +9,12 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import logging
 import numpy as np
 import nibabel as nib
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 from pipe_tree import In, Out, Ref
 
 log = logging.getLogger(__name__)
@@ -25,10 +26,10 @@ def run(ctx,
         tmp_dir:       Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        T1_tmp_5 = tmp_dir + '/T1_tmp_5.nii.gz'
-        T1_tmp_6 = tmp_dir + '/T1_tmp_6.nii.gz'
+        T1_tmp_5 = op.join(tmp_dir, 'T1_tmp_5.nii.gz')
+        T1_tmp_6 = op.join(tmp_dir, 'T1_tmp_6.nii.gz')
 
         refSubj = ctx.get_data('MNI/MNI152_T1_1mm_BigFoV_facemask.nii.gz')
 
diff --git a/bip/pipelines/struct_T1/T1_brain_extract.py b/bip/pipelines/struct_T1/T1_brain_extract.py
index c068ef1413328765a0825c2567d30866a6f5703b..f4b1d03329734ecbaaa75bff07f262c9a4481efa 100755
--- a/bip/pipelines/struct_T1/T1_brain_extract.py
+++ b/bip/pipelines/struct_T1/T1_brain_extract.py
@@ -10,11 +10,11 @@
 # pylint: disable=W0613
 #
 
-
+import os.path as op
 import logging
 from shutil import copyfile
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 from pipe_tree import In, Out, Ref
 
 log = logging.getLogger(__name__)
@@ -40,13 +40,14 @@ def run(ctx,
         T1_to_MNI_warp_coef:     Out,
         T1_to_MNI_warp_jac:      Out):
 
-    with redirect_logging(job_name(run), outdir=logs_dir), tempdir(tmp_dir):
+    with redirect_logging(job_name(run), outdir=logs_dir),\
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        T1_tmp_1                  = tmp_dir + '/T1_tmp_1.nii.gz'
-        T1_tmp_2                  = tmp_dir + '/T1_tmp_2.nii.gz'
-        T1_tmp_1_brain            = tmp_dir + '/T1_tmp_1_brain.nii.gz'
-        T1_tmp_orig_ud_to_std_mat = tmp_dir + '/T1_tmp_to_std.mat'
-        T1_tmp                    = tmp_dir + '/T1.nii.gz'
+        T1_tmp_1                  = op.join(tmp_dir, 'T1_tmp_1.nii.gz')
+        T1_tmp_2                  = op.join(tmp_dir, 'T1_tmp_2.nii.gz')
+        T1_tmp_1_brain            = op.join(tmp_dir, 'T1_tmp_1_brain.nii.gz')
+        T1_tmp_orig_ud_to_std_mat = op.join(tmp_dir, 'T1_tmp_to_std.mat')
+        T1_tmp                    = op.join(tmp_dir, 'T1.nii.gz')
 
         #Calculate where does the brain start in the z dimension and extract the roi
         head_top=int(round(float(wrappers.robustfov(T1_orig_ud).stdout[0].split()[7])))
@@ -66,16 +67,16 @@ def run(ctx,
         #Generate the actual affine from the orig_ud volume to the cut version
         #we haveand combine it to have an affine matrix from orig_ud to MNI
         wrappers.flirt(src=T1, ref=T1_orig_ud, omat=T1_to_T1_orig_ud_mat,
-                 schedule = ctx.FSLDIR + '/etc/flirtsch/xyztrans.sch')
+               schedule = op.join(ctx.FSLDIR, 'etc', 'flirtsch', 'xyztrans.sch'))
         wrappers.invxfm(inmat=T1_to_T1_orig_ud_mat, omat=T1_orig_ud_to_T1_mat)
-        wrappers.concatxfm(atob=T1_to_T1_orig_ud_mat, btoc=T1_orig_ud_to_std_mat,
+        wrappers.concatxfm(atob=T1_to_T1_orig_ud_mat,btoc=T1_orig_ud_to_std_mat,
                            atoc=T1_to_MNI_linear_mat)
 
         #Non-linear registration to MNI using the previously calculated alignment
         wrappers.fnirt(src = T1, ref = ctx.MNI, aff = T1_to_MNI_linear_mat,
               config = ctx.get_data('fnirt/bb_fnirt.cnf'),
               refmask = ctx.get_data('MNI/MNI152_T1_1mm_brain_mask_dil_GD7.nii.gz'),
-              logout = logs_dir + '/bb_T1_to_MNI_fnirt.log',
+              logout = op.join(logs_dir, 'bb_T1_to_MNI_fnirt.log'),
               cout = T1_to_MNI_warp_coef, fout = T1_to_MNI_warp,
               jout = T1_to_MNI_warp_jac, iout = T1_tmp_2,
               interp = 'spline')
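
Note: concatxfm here chains the two linear registrations, so T1_to_MNI_linear_mat should equal the matrix product of the orig_ud-to-standard affine with the T1-to-orig_ud affine. A small check of that identity, assuming the three .mat files are available on disk (the file names below are placeholders for the corresponding pipeline paths):

    import numpy as np
    from fsl.transform import flirt

    A = flirt.readFlirt('T1_to_T1_orig_ud.mat')    # T1         -> T1_orig_ud
    B = flirt.readFlirt('T1_orig_ud_to_std.mat')   # T1_orig_ud -> MNI (linear)
    C = flirt.readFlirt('T1_to_MNI_linear.mat')    # written by concatxfm

    # FLIRT .mat files are 4x4 affines; applying A first and then B is B @ A.
    assert np.allclose(C, B @ A, atol=1e-4)
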
diff --git a/bip/pipelines/struct_T1/T1_defacing.py b/bip/pipelines/struct_T1/T1_defacing.py
index 129c20e919f607883ee0515197de5cf47faf87a8..da4ed929f0c7d0f203fd241bb78e6d7e191b5bb3 100755
--- a/bip/pipelines/struct_T1/T1_defacing.py
+++ b/bip/pipelines/struct_T1/T1_defacing.py
@@ -9,10 +9,11 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import logging
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 
 log = logging.getLogger(__name__)
 
@@ -29,10 +30,10 @@ def run(ctx,
         tmp_dir:                           Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        T1_tmp_4                  = tmp_dir + '/T1_tmp_4.nii.gz'
-        T1_tmp_mat                = tmp_dir + '/T1_tmp.mat'
+        T1_tmp_4                  = op.join(tmp_dir, 'T1_tmp_4.nii.gz')
+        T1_tmp_mat                = op.join(tmp_dir, 'T1_tmp.mat')
 
         #TODO: Replace this part with proper call to fsl_deface
         #Defacing T1_orig
diff --git a/bip/pipelines/struct_T1/T1_fast.py b/bip/pipelines/struct_T1/T1_fast.py
index db37b28f528ba73384cfe7cc304193f5a698f42b..0eb8095fa5a34b7576358f5a3916b78a84b293fb 100755
--- a/bip/pipelines/struct_T1/T1_fast.py
+++ b/bip/pipelines/struct_T1/T1_fast.py
@@ -10,9 +10,10 @@
 # pylint: disable=W0613
 #
 
+import os.path as op
 import logging
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 from pipe_tree import In, Out, Ref
 
 log = logging.getLogger(__name__)
@@ -36,7 +37,7 @@ def run(ctx,
     with redirect_logging(job_name(run), outdir=logs_dir):
 
         #Run fast
-        wrappers.fast(T1_brain, out = T1_fast_dir + '/T1_brain', b=True)
+        wrappers.fast(T1_brain, out = op.join(T1_fast_dir, 'T1_brain'), b=True)
 
         #Binarize PVE masks
         wrappers.fslmaths(T1_fast_pve_0).thr(0.5).bin().run(T1_fast_CSF_mask)
diff --git a/bip/pipelines/struct_T1/T1_first.py b/bip/pipelines/struct_T1/T1_first.py
index cb39f91e9efabc785e123cdcfefe3714efe3e4f9..193e1073a9f047f4b5bbd0d10232e927f656ded1 100755
--- a/bip/pipelines/struct_T1/T1_first.py
+++ b/bip/pipelines/struct_T1/T1_first.py
@@ -10,12 +10,13 @@
 # pylint: disable=W0613
 #
 
-import os
+import os
+import os.path as op
 import glob
 import logging
 from pipe_tree import In, Out, Ref
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 
 
 log = logging.getLogger(__name__)
@@ -30,15 +31,15 @@ def run(ctx,
     with redirect_logging(job_name(run), outdir=logs_dir):
 
         # Creates a link inside T1_first to T1_unbiased_brain.nii.gz
-        if not os.path.exists(T1_first_unbiased_brain):
-            os.symlink(src="../T1_unbiased_brain.nii.gz",
-                       dst=T1_first_unbiased_brain)
+        if not op.exists(T1_first_unbiased_brain):
+            rel_path = op.relpath(T1_unbiased_brain, op.dirname(T1_first_unbiased_brain))
+            os.symlink(src=rel_path, dst=T1_first_unbiased_brain)
         wrappers.run_first_all(input=T1_first_unbiased_brain,
                                output=T1_first_prefix, b=True)
 
         for f in glob.glob(T1_first_prefix + "-*_first*.nii.gz"):
-            if os.path.exists(f):
+            if op.exists(f):
                 os.remove(f)
         for f in glob.glob(T1_first_prefix + "-*_corr*.nii.gz"):
-            if os.path.exists(f):
+            if op.exists(f):
                 os.remove(f)
diff --git a/bip/pipelines/struct_T1/T1_sienax.py b/bip/pipelines/struct_T1/T1_sienax.py
index 6d2be868341c150b52c21f877bbbc97bc2fc8397..eb2230206ba718531e0333394228ffa55e612de8 100755
--- a/bip/pipelines/struct_T1/T1_sienax.py
+++ b/bip/pipelines/struct_T1/T1_sienax.py
@@ -10,10 +10,11 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
 from fsl.transform import affine, flirt
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 from pipe_tree import In, Out, Ref
 
 log = logging.getLogger(__name__)
@@ -40,21 +41,21 @@ def run(ctx,
         tmp_dir:                             Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
-
-        T1_tmp_mat_1 = tmp_dir + '/tmp_mat_1.mat'
-        T1_tmp_mat_2 = tmp_dir + '/tmp_mat_2.mat'
-        T1_tmp_mat_3 = tmp_dir + '/tmp_mat_3.mat'
+         tempdir(op.join(tmp_dir, job_name(run))):
+
+        T1_tmp_mat_1 = op.join(tmp_dir, 'tmp_mat_1.mat')
+        T1_tmp_mat_2 = op.join(tmp_dir, 'tmp_mat_2.mat')
+        T1_tmp_mat_3 = op.join(tmp_dir, 'tmp_mat_3.mat')
 
         FSLDIR  = ctx.FSLDIR
         FSLDATA = FSLDIR + '/data'
         FSLMNI  = FSLDATA + '/standard'
 
-        MNI               = FSLMNI + '/MNI152_T1_1mm.nii.gz'
-        MNI_2mm_brain     = FSLMNI + '/MNI152_T1_2mm_brain.nii.gz'
-        MNI_2mm_skull     = FSLMNI + '/MNI152_T1_2mm_skull.nii.gz'
-        MNI_2mm_structseg = FSLMNI + '/MNI152_T1_2mm_strucseg.nii.gz'
-        MNI_2mm_segperiph = FSLMNI + '/MNI152_T1_2mm_strucseg_periph.nii.gz'
+        MNI               = ctx.get_standard('MNI152_T1_1mm.nii.gz')
+        MNI_2mm_brain     = ctx.get_standard('MNI152_T1_2mm_brain.nii.gz')
+        MNI_2mm_skull     = ctx.get_standard('MNI152_T1_2mm_skull.nii.gz')
+        MNI_2mm_structseg = ctx.get_standard('MNI152_T1_2mm_strucseg.nii.gz')
+        MNI_2mm_segperiph = ctx.get_standard('MNI152_T1_2mm_strucseg_periph.nii.gz')
 
         report = []
 
@@ -66,17 +67,17 @@ def run(ctx,
         #             ${FSLDIR}/data/standard/MNI152_T1_2mm_skull T1_brain_skull
         #             T1_to_MNI_linear.mat
         wrappers.flirt(src=T1_brain, ref=MNI_2mm_brain, omat=T1_tmp_mat_1,
-                       schedule = ctx.FSLDIR + '/etc/flirtsch/pairreg1.sch',
-                       interp="trilinear")
+              schedule = op.join(ctx.FSLDIR, 'etc', 'flirtsch', 'pairreg1.sch'),
+              interp="trilinear")
         wrappers.flirt(src=T1_sienax_brain_skull, ref=MNI_2mm_skull,
-                      omat=T1_tmp_mat_2, init=T1_tmp_mat_1, interp="trilinear",
-                      schedule = ctx.FSLDIR + '/etc/flirtsch/pairreg2.sch')
+              omat=T1_tmp_mat_2, init=T1_tmp_mat_1, interp="trilinear",
+              schedule = op.join(ctx.FSLDIR, 'etc', 'flirtsch', 'pairreg2.sch'))
         wrappers.fixscaleskew(inmat1=T1_tmp_mat_1, inmat2=T1_tmp_mat_2,
                              omat=T1_tmp_mat_3)
         wrappers.flirt(src=T1_brain, ref=MNI_2mm_brain,
-                       omat=T1_sienax_to_MNI_linear_mat, init=T1_tmp_mat_3,
-                       schedule = ctx.FSLDIR + '/etc/flirtsch/pairreg3.sch',
-                       interp="trilinear")
+              omat=T1_sienax_to_MNI_linear_mat, init=T1_tmp_mat_3,
+              schedule = op.join(ctx.FSLDIR, 'etc', 'flirtsch', 'pairreg3.sch'),
+              interp="trilinear")
 
         matrix = flirt.readFlirt(T1_to_MNI_linear_mat)
         scales = affine.decompose(matrix)[0]
@@ -92,15 +93,18 @@ def run(ctx,
         wrappers.applywarp(src=MNI_2mm_segperiph, ref=T1,
                            w=T1_to_MNI_warp_coef_inv, out=T1_sienax_segperiph,
                            rel=True, interp='trilinear')
-        wrappers.fslmaths(T1_sienax_segperiph).thr(0.5).bin().run(T1_sienax_segperiph)
+        wrappers.fslmaths(T1_sienax_segperiph).thr(0.5).bin().\
+            run(T1_sienax_segperiph)
 
-        wrappers.fslmaths(MNI_2mm_structseg).thr(4.5).bin().run(T1_sienax_segvent)
+        wrappers.fslmaths(MNI_2mm_structseg).thr(4.5).bin().\
+            run(T1_sienax_segvent)
         wrappers.applywarp(src=T1_sienax_segvent, ref=T1,
                            w=T1_to_MNI_warp_coef_inv, out=T1_sienax_segvent,
                            rel=True, interp='nn')
 
 
-        wrappers.fslmaths(T1_fast_pve_1).mas(T1_sienax_segperiph).run(T1_sienax_pve_1_segperiph)
+        wrappers.fslmaths(T1_fast_pve_1).mas(T1_sienax_segperiph).\
+            run(T1_sienax_pve_1_segperiph)
         V   = wrappers.fslstats(T1_sienax_pve_1_segperiph).m.v.run()
         xa  = float(V[0])
         xb  = float(V[2])
@@ -112,7 +116,8 @@ def run(ctx,
         report.append(f'pgrey              {xg} {uxg} (peripheral grey)')
 
 
-        wrappers.fslmaths(T1_fast_pve_0).mas(T1_sienax_segperiph).run(T1_sienax_pve_0_segperiph)
+        wrappers.fslmaths(T1_fast_pve_0).mas(T1_sienax_segperiph).\
+            run(T1_sienax_pve_0_segperiph)
         V   = wrappers.fslstats(T1_sienax_pve_0_segperiph).m.v.run()
         xa  = float(V[0])
         xb  = float(V[2])
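
Note on the fslstats calls above: with the .m.v options the returned values should be [mean, nvoxels, volume_mm3], so xa is the mean partial-volume fraction inside the peripheral-grey mask and xb the mask volume in mm^3; the unnormalised tissue volume is then their product, which is presumably what the xg assignment outside this hunk computes. A tiny arithmetic sketch with made-up numbers:

    mean_pve = 0.62        # xa: V[0] from fslstats -m
    mask_vol = 850_000.0   # xb: V[2] from fslstats -v, in mm^3
    grey_vol = mean_pve * mask_vol
    print(f'peripheral grey ~ {grey_vol:.0f} mm^3')   # ~527000
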
diff --git a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_apply_bfc.py b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_apply_bfc.py
index a9ba82cccefde53aac5f4dfee04525362e464e54..3ed955aaabb42603b43932ce1ffde386e245fe09 100755
--- a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_apply_bfc.py
+++ b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_apply_bfc.py
@@ -12,9 +12,10 @@
 #
 
 import os
+import os.path as op
 import logging
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 from pipe_tree import In, Out, Ref
 
 log = logging.getLogger(__name__)
@@ -30,7 +31,7 @@ def run(ctx,
     with redirect_logging(job_name(run), outdir=logs_dir):
 
         #Apply bias field correction to T2_FLAIR warped
-        if os.path.isfile(T1_fast_brain_bias):
+        if op.isfile(T1_fast_brain_bias):
             wrappers.fslmaths(T2_FLAIR).div(T1_fast_brain_bias).run(T2_FLAIR_unbiased)
             wrappers.fslmaths(T2_FLAIR_brain).div(T1_fast_brain_bias).run(T2_FLAIR_unbiased_brain)
         else:
diff --git a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_bianca.py b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_bianca.py
index d85c2745e45bc6a7b8b619bc58e3a80a0fb3e96c..5d02a50aeb5c1f90e5a986e120e37b3e1bde8b4b 100755
--- a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_bianca.py
+++ b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_bianca.py
@@ -9,11 +9,12 @@
 # pylint: disable=W0613,C0301
 #
 
+import os.path as op
 import logging
 from shutil import copyfile
 from pipe_tree import In, Out, Ref
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 
 log = logging.getLogger(__name__)
 
diff --git a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_brain_extract.py b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_brain_extract.py
index 1fba52cd703e7275591c1bcc2cde5461ba46ca71..79034d2e8b87dfaa63d30dcddd6935fc477a3ae3 100755
--- a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_brain_extract.py
+++ b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_brain_extract.py
@@ -9,10 +9,11 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import logging
 from shutil import copyfile
 from fsl import wrappers
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 from pipe_tree import In, Out, Ref
 
 log = logging.getLogger(__name__)
@@ -39,10 +40,10 @@ def run(ctx,
         tmp_dir:                            Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        T2_FLAIR_tmp_1_mat = tmp_dir + '/T2_FLAIR_tmp_1.mat'
-        T2_FLAIR_tmp_2_mat = tmp_dir + '/T2_FLAIR_tmp_2.mat'
+        T2_FLAIR_tmp_1_mat = op.join(tmp_dir, 'T2_FLAIR_tmp_1.mat')
+        T2_FLAIR_tmp_2_mat = op.join(tmp_dir, 'T2_FLAIR_tmp_2.mat')
 
         #Take T2 to T1 and also the brain mask
         wrappers.flirt(src=T2_FLAIR_orig_ud, ref=T1_orig_ud,
diff --git a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_defacing.py b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_defacing.py
index d06e5f2aa1b1a8bf19da9ab71cd42f7da7161923..9a9a3cd3094f0164ad42cf32337c36421182baf0 100755
--- a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_defacing.py
+++ b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_defacing.py
@@ -10,12 +10,12 @@
 # pylint: disable=W0613
 #
 
-
+import os.path as op
 import logging
 from shutil import copyfile
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging, tempdir
+from bip.utils.log_utils import redirect_logging, tempdir, job_name
 
 log = logging.getLogger(__name__)
 
@@ -32,10 +32,10 @@ def run(ctx,
         tmp_dir:                          Ref):
 
     with redirect_logging(job_name(run), outdir=logs_dir),\
-         tempdir(tmp_dir):
+         tempdir(op.join(tmp_dir, job_name(run))):
 
-        T2_FLAIR_tmp_1    = tmp_dir + '/T2_FLAIR_tmp_1nii.gz'
-        T2_FLAIR_tmp_3_mat = tmp_dir + '/T2_FLAIR_tmp_3.mat'
+        T2_FLAIR_tmp_1     = op.join(tmp_dir, 'T2_FLAIR_tmp_1.nii.gz')
+        T2_FLAIR_tmp_3_mat = op.join(tmp_dir, 'T2_FLAIR_tmp_3.mat')
 
         BigFoV_mask_mat = 'MNI/MNI_to_MNI_BigFoV_facemask.mat'
         BigFoV_mask     = 'MNI/MNI152_T1_1mm_BigFoV_facemask'
diff --git a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_gdc.py b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_gdc.py
index 7d1fed84d9a2baf2897a3dc0bc8c4078cb65c8c2..d4d910b173c7bca8efc3fbeefb871ac8e6d16bc8 100755
--- a/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_gdc.py
+++ b/bip/pipelines/struct_T2_FLAIR/T2_FLAIR_gdc.py
@@ -13,7 +13,7 @@
 import logging
 from shutil import copyfile
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 from gradunwarp.core.gradient_unwarp_apply import gradient_unwarp_apply
 
 log = logging.getLogger(__name__)
diff --git a/bip/pipelines/struct_asl/asl_get_IDPs.py b/bip/pipelines/struct_asl/asl_get_IDPs.py
index 0a1dd4ebc731e6662e51192d5c00f4773a55959d..fff56fe70357f2e28daa8786217842e391e87727 100755
--- a/bip/pipelines/struct_asl/asl_get_IDPs.py
+++ b/bip/pipelines/struct_asl/asl_get_IDPs.py
@@ -10,9 +10,10 @@
 # pylint: disable=W0613,R1718
 #
 
+import os.path as op
 import logging
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 
 log = logging.getLogger(__name__)
 
@@ -34,8 +35,8 @@ def run(ctx,
         regions = set([x[0] for x in asl_format])
 
         for region in regions:
-            file_name_1 = ASL_region_analysis_dir + "/" + region + ".csv"
-            file_name_2 = ASL_region_analysis_dir + "/" + region + ".txt"
+            file_name_1 = op.join(ASL_region_analysis_dir, region + ".csv")
+            file_name_2 = op.join(ASL_region_analysis_dir, region + ".txt")
 
             with open(file_name_1, 'rt', encoding="utf-8") as f:
                 file_data = f.read()
@@ -54,7 +55,7 @@ def run(ctx,
             row    = elem[1]
             column = elem[2]
 
-            file_name = ASL_region_analysis_dir + "/" + fil + ".txt"
+            file_name = op.join(ASL_region_analysis_dir, fil + ".txt")
 
             with open(file_name, 'rt', encoding="utf-8") as f:
                 file_data = [x.strip().split(",") for x in f.readlines()]
diff --git a/bip/pipelines/struct_asl/asl_proc.py b/bip/pipelines/struct_asl/asl_proc.py
index 4f785d42d823ca8ab23c44585d33346ee22b280a..9b3fb6ab6e599f6b7370d0ad3191ecb93b9e0c0d 100755
--- a/bip/pipelines/struct_asl/asl_proc.py
+++ b/bip/pipelines/struct_asl/asl_proc.py
@@ -11,13 +11,14 @@
 #
 
 import os
+import os.path as op
 import glob
 import logging
 from shutil import copyfile
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
 from gradunwarp.core.gradient_unwarp_apply import gradient_unwarp_apply
-from bip.utils.log_utils       import redirect_logging
+from bip.utils.log_utils       import redirect_logging, job_name
 from bip.utils.read_json_field import read_json_field
 
 log = logging.getLogger(__name__)
@@ -64,7 +65,7 @@ def run(ctx,
         ###############################################
         list_PEDs = []
 
-        for fil in glob.glob(ASL_raw_dir + "/*.json"):
+        for fil in glob.glob(op.join(ASL_raw_dir, "*.json")):
             list_PEDs.append(read_json_field(fileName=fil,
                                             fieldName="PhaseEncodingDirection"))
 
@@ -88,8 +89,11 @@ def run(ctx,
         wrappers.fslmerge("t", ASL_label, *list_fils_label)
         wrappers.fslmerge("t", ASL_DATA_wrongorder, ASL_label, ASL_control)
 
-        os.symlink("../raw/ASL_M0.nii.gz", CALIB)
-        os.symlink("../raw/ASL_M0.json",   CALIB_json)
+        rel_path = op.relpath(ASL_M0, op.dirname(CALIB))
+        os.symlink(rel_path, CALIB)
+
+        rel_path = op.relpath(ASL_M0_json, op.dirname(CALIB_json))
+        os.symlink(rel_path, CALIB_json)
 
         TE = read_json_field(fileName=CALIB_json, fieldName="EchoTime",
                              rounding = 3, multFactor=1000)
diff --git a/bip/pipelines/struct_swMRI/swMRI_gdc.py b/bip/pipelines/struct_swMRI/swMRI_gdc.py
index daa322d8a20573152ba90aa387fc07d18c6bfe0a..a0b700090751a2e51d92547155db9bffa5b342bc 100755
--- a/bip/pipelines/struct_swMRI/swMRI_gdc.py
+++ b/bip/pipelines/struct_swMRI/swMRI_gdc.py
@@ -12,7 +12,7 @@
 import logging
 from shutil import copyfile
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 from gradunwarp.core.gradient_unwarp_apply import gradient_unwarp_apply
 
 log = logging.getLogger(__name__)
diff --git a/bip/pipelines/struct_swMRI/swMRI_proc.py b/bip/pipelines/struct_swMRI/swMRI_proc.py
index c75c4728d9ffaaa6a7115fd07240898808ccd71c..08c5b20430d93e8d36665740934d98f40d849719 100755
--- a/bip/pipelines/struct_swMRI/swMRI_proc.py
+++ b/bip/pipelines/struct_swMRI/swMRI_proc.py
@@ -11,11 +11,12 @@
 #
 
 import os
+import os.path as op
 import logging
 from shutil import copyfile
 from fsl import wrappers
 from pipe_tree import In, Out, Ref
-from bip.utils.log_utils import redirect_logging
+from bip.utils.log_utils import redirect_logging, job_name
 from bip.pipelines.struct_swMRI.swMRI_proc_fnc import combine_magnitude_coils
 from bip.pipelines.struct_swMRI.swMRI_proc_fnc import gen_filtered_phase
 from gradunwarp.core.gradient_unwarp_apply import gradient_unwarp_apply
@@ -63,13 +64,13 @@ def run(ctx,
                 complex_phase = True
 
         MAG_TE1 = SWI_MAG_TE1_orig
-        if os.path.exists(SWI_MAG_TE2_orig):
+        if op.exists(SWI_MAG_TE2_orig):
             MAG_TE2 = SWI_MAG_TE2_orig
         else:
             MAG_TE2 = ""
 
         if num_coils > 0:
-            if os.path.exists(SWI_MAG_TE1_C01):
+            if op.exists(SWI_MAG_TE1_C01):
                 combine_magnitude_coils(MAG_TE1, MAG_TE2, MAG_TE1_dir,
                                         MAG_TE2_dir, num_coils, SOS_TE1,
                                         SOS_TE2, SOS_ratio, R2star, T2star)
@@ -112,7 +113,7 @@ def run(ctx,
         #TODO: Change name of SWI to venogram?
         for fil in [R2star, T2star, SOS_TE1, SOS_TE2, SOS_ratio, filtered_phase,
                     SWI]:
-            if os.path.exists(fil):
+            if op.exists(fil):
                 if ctx.gdc != '':
                     wrappers.applywarp(src=fil, ref=fil, out=fil, rel=True,
                                        w=SWI_TOTAL_MAG_orig_ud_warp,
diff --git a/bip/pipelines/struct_swMRI/swMRI_proc_fnc.py b/bip/pipelines/struct_swMRI/swMRI_proc_fnc.py
index 17afae1bf3c6ee05f6b7a56026693e88ac9077c8..3e957ee0242072ec2e30641dd535d08d8335f953 100755
--- a/bip/pipelines/struct_swMRI/swMRI_proc_fnc.py
+++ b/bip/pipelines/struct_swMRI/swMRI_proc_fnc.py
@@ -9,6 +9,7 @@
 # pylint: disable=C0103,E0602,C0114,C0115,C0116,R0913,R0914,R0915
 #
 
+import os.path as op
 import glob
 import nibabel as nib
 from fsl import wrappers
@@ -34,13 +35,13 @@ def combine_magnitude_coils(MAG_TE1, MAG_TE2, MAG_TE1_dir, MAG_TE2_dir,
 
     if not num_coils == 0:
 
-        files_TE1 = glob.glob(MAG_TE1_dir + '/*.nii.gz')
+        files_TE1 = glob.glob(op.join(MAG_TE1_dir, '*.nii.gz'))
         wrappers.fslmaths(files_TE1[0]).mul(0).run(SOS_TE1)
         for file_TE1 in files_TE1:
             wrappers.fslmaths(file_TE1).sqr().add(SOS_TE1).run(SOS_TE1)
         wrappers.fslmaths(SOS_TE1).sqrt().run(SOS_TE1)
 
-        files_TE2 = glob.glob(MAG_TE2_dir + '/*.nii.gz')
+        files_TE2 = glob.glob(op.join(MAG_TE2_dir, '*.nii.gz'))
         wrappers.fslmaths(files_TE2[0]).mul(0).run(SOS_TE2)
         for file_TE2 in files_TE2:
             wrappers.fslmaths(file_TE2).sqr().add(SOS_TE2).run(SOS_TE2)
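
Note: the fslmaths chain in combine_magnitude_coils() is a standard sum-of-squares coil combination, SOS = sqrt(sum over coils of coil^2); the TE1 branch above shows the zero-initialise, square-and-add, then sqrt steps, and the TE2 branch follows the same pattern. An equivalent numpy sketch, assuming the per-coil magnitude images live in one directory as *.nii.gz:

    import glob
    import os.path as op
    import numpy as np
    import nibabel as nib

    def sum_of_squares(coil_dir):
        # Accumulate the square of every coil image, then take the root.
        files = sorted(glob.glob(op.join(coil_dir, '*.nii.gz')))
        imgs  = [nib.load(f) for f in files]
        acc   = np.zeros(imgs[0].shape)
        for img in imgs:
            acc += np.square(img.get_fdata())
        return nib.Nifti1Image(np.sqrt(acc), imgs[0].affine, imgs[0].header)

    # e.g. sum_of_squares(MAG_TE1_dir) would reproduce SOS_TE1 up to
    # datatype and rounding.
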
@@ -57,8 +58,8 @@ def combine_magnitude_coils(MAG_TE1, MAG_TE2, MAG_TE1_dir, MAG_TE2_dir,
 
 def gen_filtered_phase(ctx, magImgDir, phaImgDir, magImgFileName, fp_fn,SWI_fn):
 
-    magImgFiles=glob.glob(magImgDir + '/*.nii.gz')
-    phaImgFiles=glob.glob(phaImgDir + '/*.nii.gz')
+    magImgFiles=glob.glob(op.join(magImgDir, '*.nii.gz'))
+    phaImgFiles=glob.glob(op.join(phaImgDir, '*.nii.gz'))
     magImgFiles.sort()
     phaImgFiles.sort()