Commit dc2884c3 authored by Christoph Arthofer's avatar Christoph Arthofer

inverse mask creation, intensity normalisation=False, delete all temporary files when clamping

parent 5532b5c6
@@ -24,7 +24,7 @@ from fsl.utils.fslsub import func_to_cmd
from operator import itemgetter
import tempfile
import argparse
import fsl_sub
# import fsl_sub
def writeConfig(step, mod, fpath):
"""! Writes the nonlinear registration parameters for a given resolution level and modalities to a file readable by MMORF.
@@ -433,91 +433,91 @@ def applyWarpWrapper(img_path, ref_path, warped_path, warp_path, interp='spline'
# https://git.fmrib.ox.ac.uk/fsl/fsl_sub
def submitJob(command, name, log_dir, queue, wait_for=None, array_task=False, coprocessor=None, coprocessor_class=None, coprocessor_multi="1", threads=1, export_var=None):
    coprocessor_class_strict = coprocessor_class is not None
    job_id = fsl_sub.submit(command=command,
                            array_task=array_task,
                            jobhold=wait_for,
                            name=name,
                            logdir=log_dir,
                            queue=queue,
                            coprocessor=coprocessor,
                            coprocessor_class=coprocessor_class,
                            coprocessor_class_strict=coprocessor_class_strict,
                            coprocessor_multi=coprocessor_multi,
                            threads=threads,
                            export_vars=export_var
                            )
    return job_id
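For comparison, a minimal call through this fsl_sub Python-API variant; the command, job name, log directory and queue below are hypothetical:

    job_id = submitJob('fslmaths T1.nii.gz -bin T1_brain_mask.nii.gz',
                       'mask_job', '/data/logs', 'short.q')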
# def submitJob(name, log_dir, queue, wait_for=[], script=None, command=None, coprocessor_class=None,
#               coprocessor=None, export_var=None, debug=False):
#     """! Wrapper for fsl_sub - submits a job to the cluster. This function can be easily extended to work with other workload managers.
#
#     @param name: Job name
#     @param log_dir: Directory where output log-files will be saved
#     @param queue: Name of queue to submit the job to
#     @param wait_for: List of IDs of jobs required to finish before running this job.
#     @param script: Path to a shell script, which contains one command per line - commands will be submitted as an array job
#     @param command: Alternatively a single command can be provided as a string - command will be submitted as single job
#     @param coprocessor_class: Coprocessor class
#     @param export_var: Environment variables to be exported to the submission node
#     @param debug: If True, information about job will be written to output
#
#     @return The job ID.
#     """
#     cmd = 'fsl_sub'
#
#     if wait_for:
#         job_ids_bool = [job != '' for job in wait_for]
#         if any(job_ids_bool):
#             cmd += ' -j '
#             wait_for_arr = np.array(wait_for)
#             wait_for_arr = wait_for_arr[job_ids_bool]
#             for j, job in enumerate(wait_for_arr):
#                 cmd += job.replace("\n", "")
#                 if j < len(wait_for_arr) - 1:
#                     cmd += ','
#
#     cmd += ' -N ' + name + \
#            ' -l ' + log_dir + \
#            ' -q ' + queue
#
#     if coprocessor_class is not None:
#         cmd += ' --coprocessor_class ' + coprocessor_class
#         cmd += ' --coprocessor_class_strict '
#     if coprocessor is not None:
#         cmd += ' --coprocessor ' + coprocessor + ' -R 32'
#
#     if export_var is not None:
#         cmd += ' --export ' + export_var
#
#     if debug:
#         cmd += ' --debug'
#
#     if script is not None and os.path.exists(script):
#         cmd += ' -t ' + script
#     elif command is not None:
#         cmd += ' ' + command + ' '
#         # cmd += ' "' + command + '"'
#
#     # stream = os.popen(cmd)
#     # job_id = stream.read()
#
#     try:
#         result = subprocess.run(shlex.split(cmd), capture_output=True, text=True, check=True)
#     except subprocess.CalledProcessError as e:
#         print(str(e), file=sys.stderr)
#         return None
#
#     job_id = result.stdout.strip()
#
#     return job_id
# def submitJob(command, name, log_dir, queue, wait_for=None, array_task=False, coprocessor=None, coprocessor_class=None, coprocessor_multi="1", threads=1, export_var=None):
#     coprocessor_class_strict = True if coprocessor_class is not None else False
#
#     job_id = fsl_sub.submit(command=command,
#                             array_task=array_task,
#                             jobhold=wait_for,
#                             name=name,
#                             logdir=log_dir,
#                             queue=queue,
#                             coprocessor=coprocessor,
#                             coprocessor_class=coprocessor_class,
#                             coprocessor_class_strict=coprocessor_class_strict,
#                             coprocessor_multi=coprocessor_multi,
#                             threads=threads,
#                             export_vars=export_var
#                             )
#
#     return job_id
def submitJob(name, log_dir, queue, wait_for=[], script=None, command=None, coprocessor_class=None,
              coprocessor=None, export_var=None, debug=False):
    """! Wrapper for fsl_sub - submits a job to the cluster. This function can easily be extended to work with other workload managers.

    @param name: Job name
    @param log_dir: Directory where output log-files will be saved
    @param queue: Name of the queue to submit the job to
    @param wait_for: List of IDs of jobs required to finish before this job runs
    @param script: Path to a shell script containing one command per line - the commands will be submitted as an array job
    @param command: Alternatively, a single command can be provided as a string - the command will be submitted as a single job
    @param coprocessor: Coprocessor to request (e.g. 'cuda')
    @param coprocessor_class: Coprocessor class
    @param export_var: Environment variables to be exported to the submission node
    @param debug: If True, information about the job will be written to the output

    @return The job ID.
    """
    cmd = 'fsl_sub'

    if wait_for:
        # Build the comma-separated job-hold list (-j) from all non-empty job IDs
        job_ids_bool = [job != '' for job in wait_for]
        if any(job_ids_bool):
            cmd += ' -j '
            wait_for_arr = np.array(wait_for)
            wait_for_arr = wait_for_arr[job_ids_bool]
            for j, job in enumerate(wait_for_arr):
                cmd += job.replace("\n", "")
                if j < len(wait_for_arr) - 1:
                    cmd += ','

    cmd += ' -N ' + name + \
           ' -l ' + log_dir + \
           ' -q ' + queue

    if coprocessor_class is not None:
        cmd += ' --coprocessor_class ' + coprocessor_class
        cmd += ' --coprocessor_class_strict '
    if coprocessor is not None:
        cmd += ' --coprocessor ' + coprocessor + ' -R 32'

    if export_var is not None:
        cmd += ' --export ' + export_var

    if debug:
        cmd += ' --debug'

    # A script is submitted as an array task (-t); otherwise the single command is appended
    if script is not None and os.path.exists(script):
        cmd += ' -t ' + script
    elif command is not None:
        cmd += ' ' + command + ' '
        # cmd += ' "' + command + '"'

    # stream = os.popen(cmd)
    # job_id = stream.read()

    try:
        result = subprocess.run(shlex.split(cmd), capture_output=True, text=True, check=True)
    except subprocess.CalledProcessError as e:
        print(str(e), file=sys.stderr)
        return None

    job_id = result.stdout.strip()
    return job_id
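For orientation, a usage sketch of the wrapper above; the job names, paths, queue names and commands are hypothetical:

    # Submit a single command, then an array job that runs once it has finished.
    jid = submitJob('mask_job', '/data/logs', 'short.q',
                    command='fslmaths T1.nii.gz -bin T1_brain_mask.nii.gz')
    jid2 = submitJob('warp_jobs', '/data/logs', 'long.q', wait_for=[jid],
                     script='/data/scripts/warp_all.sh')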
def RMSdifference(img1_path, img2_path, mask1_path=None, mask2_path=None, rms_path=None):
"""! Calculates the difference between two images or warps as the root mean squared (RMS)
@@ -854,8 +854,8 @@ if __name__ == "__main__":
jobcmd = jobcmd + '\n'
jobcmd = jobcmd + 'fslmaths ' + tree.get('data/lesion_mask_in_T1') + ' -bin ' + tree.get('inverse_lesion_mask_in_T1') + '\n'
f.write(jobcmd)
# job_ids[0] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
job_ids[0] = submitJob(script_path, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
job_ids[0] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
# job_ids[0] = submitJob(script_path, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
print('submitted: ' + task_name)
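Every stage below follows the same pattern: write one shell command per subject into a script, then submit the script as an array job that holds on its upstream stages. A schematic sketch - all subject IDs, paths and file names are placeholders:

    import os

    ids = ['sub-001', 'sub-002']                  # illustrative subject IDs
    script_dir, log_dir, cpuq = '/data/scripts', '/data/logs', 'short.q'
    tag, task_name = 'study', '000_example_masks'
    script_path = os.path.join(script_dir, task_name + '.sh')
    with open(script_path, 'w') as f:
        for sid in ids:
            # one fslmaths call per subject, one line per array task
            f.write('fslmaths ' + sid + '_mask.nii.gz -bin ' + sid + '_mask_bin.nii.gz\n')
    job_id = submitJob(tag + '_' + task_name, log_dir, script=script_path, queue=cpuq)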
# Register all individual images to one reference image
@@ -873,8 +873,8 @@ if __name__ == "__main__":
omat=tree.get('T1_to_ref_mat', make_dir=True), dof=6, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[1] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
job_ids[1] = submitJob(script_path, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
job_ids[1] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
# job_ids[1] = submitJob(script_path, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
print('submitted: ' + task_name)
# Register T2 images to corresponding T1 images
@@ -889,8 +889,8 @@ if __name__ == "__main__":
dof=6, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[2] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
job_ids[2] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
job_ids[2] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
# job_ids[2] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
print('submitted: ' + task_name)
@@ -906,9 +906,8 @@ if __name__ == "__main__":
dof=6, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[3] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
job_ids[3] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=None,
array_task=True)
job_ids[3] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq)
# job_ids[3] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=None, array_task=True)
print('submitted: ' + task_name)
@@ -931,8 +930,8 @@ if __name__ == "__main__":
tmp_dir=script_dir,
kwargs=None,
clean="never")
# job_ids[4] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[1]])
job_ids[4] = submitJob(jobcmd, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=[job_ids[1]], array_task=False)
job_ids[4] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[1]])
# job_ids[4] = submitJob(jobcmd, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=[job_ids[1]], array_task=False)
print('submitted: ' + task_name)
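func_to_cmd (from fsl.utils.fslsub) serializes a Python function call into a standalone script and returns the command that executes it, so pure-Python steps such as averageImages can be scheduled like any shell command. A sketch with illustrative arguments:

    from fsl.utils.fslsub import func_to_cmd

    img_paths = ['/data/sub-001_T1.nii.gz', '/data/sub-002_T1.nii.gz']  # placeholders
    jobcmd = func_to_cmd(averageImages, args=(img_paths, '/data/template.nii.gz', 'median', False),
                         tmp_dir='/data/scripts', kwargs=None, clean='never')
    job_id = submitJob('study_avg', '/data/logs', command=jobcmd, queue='short.q')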
@@ -947,9 +946,8 @@ if __name__ == "__main__":
out=tree.get('T1_to_unbiased_img'), interp='spline', cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[5] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[4]])
job_ids[5] = submitJob(script_path, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=[job_ids[4]], array_task=True)
job_ids[5] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[4]])
# job_ids[5] = submitJob(script_path, tag+'_'+task_name, log_dir, queue=cpuq, wait_for=[job_ids[4]], array_task=True)
print('submitted: ' + task_name)
# Concat T2_to_T1 and T1_to_unbiased
@@ -964,10 +962,8 @@ if __name__ == "__main__":
tree.get('T2_to_unbiased_mat'), cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[6] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[4]])
job_ids[6] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=list(itemgetter(*[2,4])(job_ids)),
array_task=True)
job_ids[6] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[4]])
# job_ids[6] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=list(itemgetter(*[2,4])(job_ids)), array_task=True)
print('submitted: ' + task_name)
# Concat DTI_to_T2 and T2_to_unbiased
@@ -982,10 +978,8 @@ if __name__ == "__main__":
tree.get('DTI_to_unbiased_mat'), cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[7] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[6]])
job_ids[7] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[6]],
array_task=True)
job_ids[7] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[6]])
# job_ids[7] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[6]], array_task=True)
print('submitted: ' + task_name)
# Averaging unbiased T1 images
@@ -999,9 +993,8 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(img_paths, aff_template_path, 'median', False), tmp_dir=script_dir, kwargs=None,
clean="never")
# job_ids[8] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[5]])
job_ids[8] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[5]],
array_task=False)
job_ids[8] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[5]])
# job_ids[8] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[5]], array_task=False)
print('submitted: ' + task_name)
# Register unbiased template to MNI space with 6 dof
@@ -1010,9 +1003,8 @@ if __name__ == "__main__":
cmd = flirt(aff_template_path, mni_path, omat=tree.get('T1_unbiased_affine_template_to_MNI_mat'),
out=tree.get('T1_unbiased_affine_template_to_MNI_img'), dof=6, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
# job_ids[9] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq, wait_for=[job_ids[8]])
job_ids[9] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[8]],
array_task=False)
job_ids[9] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq, wait_for=[job_ids[8]])
# job_ids[9] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[8]], array_task=False)
print('submitted: ' + task_name)
# Concatenate individual affine transformations (T1 brain to unbiased T1 and the rigid transformation to MNI)
@@ -1026,10 +1018,8 @@ if __name__ == "__main__":
tree.get('T1_to_MNI_mat', make_dir=True), cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[10] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[9]])
job_ids[10] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[9]],
array_task=True)
job_ids[10] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[9]])
# job_ids[10] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[9]], array_task=True)
print('submitted: ' + task_name)
# Concatenate individual affine transformations (T2 brain to unbiased T2 and the rigid transformation to MNI)
@@ -1044,10 +1034,8 @@ if __name__ == "__main__":
tree.get('T2_to_MNI_mat'), cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[12] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[9]])
job_ids[12] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[9]],
array_task=True)
job_ids[12] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[9]])
# job_ids[12] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[9]], array_task=True)
print('submitted: ' + task_name)
# Concatenate individual affine transformations (DTI to unbiased T2 and the rigid transformation to MNI)
@@ -1062,10 +1050,8 @@ if __name__ == "__main__":
tree.get('DTI_to_MNI_mat'), cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[14] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[9]])
job_ids[14] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[9]],
array_task=True)
job_ids[14] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[9]])
# job_ids[14] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[9]], array_task=True)
print('submitted: ' + task_name)
# Transform individual T1 brain images to MNI space
@@ -1080,10 +1066,8 @@ if __name__ == "__main__":
out=tree.get('T1_brain_to_MNI_img'), interp='spline', cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[15] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[10]])
job_ids[15] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[10]],
array_task=True)
job_ids[15] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[10]])
# job_ids[15] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[10]], array_task=True)
print('submitted: ' + task_name)
# Transform individual T1 brain masks to MNI space
@@ -1099,10 +1083,8 @@ if __name__ == "__main__":
out=tree.get('T1_brain_mask_to_MNI_img'), interp='trilinear', cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[16] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[10]])
job_ids[16] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[10]],
array_task=True)
job_ids[16] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[10]])
# job_ids[16] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[10]], array_task=True)
print('submitted: ' + task_name)
# Transform individual T1 head images to MNI space
@@ -1117,10 +1099,8 @@ if __name__ == "__main__":
out=tree.get('T1_head_to_MNI_img'), interp='spline', cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[17] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=list(itemgetter(*[0, 10])(job_ids)))
job_ids[17] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=list(itemgetter(*[0, 10])(job_ids)),
array_task=True)
job_ids[17] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=list(itemgetter(*[0, 10])(job_ids)))
# job_ids[17] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=list(itemgetter(*[0, 10])(job_ids)), array_task=True)
print('submitted: ' + task_name)
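wait_for can hold several upstream job IDs at once; itemgetter simply pulls a subset out of job_ids, equivalent to explicit indexing:

    from operator import itemgetter

    job_ids = [''] * 51                          # stand-in for the pipeline's ID table
    job_ids[0], job_ids[10] = '12001', '12045'   # illustrative scheduler job IDs
    hold = list(itemgetter(*[0, 10])(job_ids))   # as used above
    assert hold == [job_ids[0], job_ids[10]]     # same as explicit indexing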
# Transform individual T2 head images to MNI space
@@ -1135,10 +1115,8 @@ if __name__ == "__main__":
out=tree.get('T2_head_to_MNI_img'), interp='spline', cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[18] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[12]])
job_ids[18] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[12]],
array_task=True)
job_ids[18] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[12]])
# job_ids[18] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[12]], array_task=True)
print('submitted: ' + task_name)
# Transform individual DTI images to MNI space
@@ -1153,10 +1131,8 @@ if __name__ == "__main__":
out=tree.get('DTI_to_MNI_img'), interp='spline', cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[19] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[14]])
job_ids[19] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[14]],
array_task=True)
job_ids[19] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[14]])
# job_ids[19] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[14]], array_task=True)
print('submitted: ' + task_name)
# Transform individual DTI tensors to MNI space
@@ -1173,10 +1149,8 @@ if __name__ == "__main__":
' -t ' + tree.get('DTI_to_MNI_mat') + \
' --interp=spline \n'
f.write(cmd)
# job_ids[20] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[14]])
job_ids[20] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[14]],
array_task=True)
job_ids[20] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[14]])
# job_ids[20] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[14]], array_task=True)
print('submitted: ' + task_name)
# Averaging transformed T1 brain images in MNI space
@@ -1191,10 +1165,8 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(img_paths, aff_template_path, 'median', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[21] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq,
# wait_for=[job_ids[15]])
job_ids[21] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[15]],
array_task=False)
job_ids[21] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[15]])
# job_ids[21] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[15]], array_task=False)
print('submitted: ' + task_name)
# Averaging transformed T1 brain masks in MNI space
@@ -1209,17 +1181,14 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(img_paths, aff_template_path, 'average', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[22] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq,
# wait_for=[job_ids[16]])
job_ids[22] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[16]],
array_task=False)
job_ids[22] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[16]])
# job_ids[22] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[16]], array_task=False)
print('submitted: ' + task_name)
task_name = '{:03d}_affT_create_weighted_brain_mask'.format(task_count)
jobcmd = 'fslmaths ' + aff_template_path + ' -bin -mul 7 -add 1 -inm 1 ' + tree.get('T1_brain_mask_weighted_affine_template')
# job_ids[23] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[22]])
job_ids[23] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[22]],
array_task=False)
job_ids[23] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[22]])
# job_ids[23] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[22]], array_task=False)
print('submitted: ' + task_name)
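The fslmaths chain above maps brain voxels to 8 (1*7+1) and background to 1 before -inm 1 rescales the volume to unit mean, so brain voxels carry eight times the weight of background in the resulting mask. The same arithmetic in numpy, for illustration only:

    import numpy as np

    avg_mask = np.random.rand(4, 4, 4)     # stand-in for the averaged brain mask
    w = (avg_mask > 0).astype(np.float32)  # -bin
    w = w * 7 + 1                          # -mul 7 -add 1: brain=8, background=1
    w = w / w.mean()                       # -inm 1: rescale the volume to unit mean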
# Averaging transformed T1 non-defaced whole-head images in MNI space
@@ -1234,10 +1203,8 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(img_paths, aff_template_path, 'median', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[24] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq,
# wait_for=[job_ids[17]])
job_ids[24] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[17]],
array_task=False)
job_ids[24] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[17]])
# job_ids[24] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[17]], array_task=False)
print('submitted: ' + task_name)
# Averaging transformed T2 non-defaced whole-head images in MNI space
@@ -1252,10 +1219,8 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(img_paths, aff_template_path, 'median', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[25] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq,
# wait_for=[job_ids[18]])
job_ids[25] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[18]],
array_task=False)
job_ids[25] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[18]])
# job_ids[25] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[18]], array_task=False)
print('submitted: ' + task_name)
# Averaging transformed DTI images in MNI space
@@ -1270,10 +1235,8 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(img_paths, aff_template_path, 'median', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[26] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq,
# wait_for=[job_ids[19]])
job_ids[26] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[19]],
array_task=False)
job_ids[26] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[19]])
# job_ids[26] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[19]], array_task=False)
print('submitted: ' + task_name)
# Averaging transformed DTI tensors in MNI space
@@ -1292,10 +1255,8 @@ if __name__ == "__main__":
common_path = os.path.commonpath(export_paths)
export_var_str = {'SINGULARITY_BIND': '"SINGULARITY_BIND=' + ','.join([common_path]) + '"'}
# job_ids[27] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq,
# export_var=export_var_str['SINGULARITY_BIND'], wait_for=[job_ids[20]])
job_ids[27] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[20]],
array_task=False, export_var=[export_var_str['SINGULARITY_BIND']])
job_ids[27] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq, export_var=export_var_str['SINGULARITY_BIND'], wait_for=[job_ids[20]])
# job_ids[27] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[20]], array_task=False, export_var=[export_var_str['SINGULARITY_BIND']])
print('submitted: ' + task_name)
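The export string bind-mounts the common parent directory of every path the containerized tool needs, making them visible inside Singularity on the execution node. Schematically, with made-up paths:

    import os

    export_paths = ['/data/study/tensors', '/data/study/templates']
    common_path = os.path.commonpath(export_paths)           # '/data/study'
    export_var = '"SINGULARITY_BIND=' + common_path + '"'    # handed to fsl_sub --export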
# Nonlinear template construction
@@ -1378,7 +1339,7 @@ if __name__ == "__main__":
aff_ref_scalar = [tree.get('identity_mat')]
aff_mov_scalar = [tree.get('T1_to_MNI_mat')]
mask_ref_scalar = [img_ref_T1brain_mask_path]
mask_mov_scalar = [tree.get('data/inverse_lesion_mask_in_T1')]
mask_mov_scalar = [tree.get('inverse_lesion_mask_in_T1')]
img_ref_tensor = []
img_mov_tensor = []
aff_ref_tensor = []
@@ -1415,13 +1376,13 @@ if __name__ == "__main__":
common_path = os.path.commonpath(value)
export_var_str[key] = '"' + key + '=' + ','.join([common_path]) + '"'
# job_ids[28] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=gpuq,
# wait_for=list(itemgetter(*[21, 23, 24, 25, 26, 27, 28, 44, 45, 46, 47, 48, 50])(job_ids)),
# coprocessor_class=None, coprocessor='cuda',
# export_var=export_var_str['SINGULARITY_BIND'], debug=False)
job_ids[28] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=gpuq,
                        wait_for=list(itemgetter(*[21, 23, 24, 25, 26, 27, 28, 44, 45, 46, 47, 48, 50])(job_ids)),
                        array_task=True, coprocessor='cuda', coprocessor_class=None, coprocessor_multi="1", threads=1, export_var=[export_var_str['SINGULARITY_BIND']])
job_ids[28] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=gpuq,
                        wait_for=list(itemgetter(*[21, 23, 24, 25, 26, 27, 28, 44, 45, 46, 47, 48, 50])(job_ids)),
                        coprocessor_class=None, coprocessor='cuda',
                        export_var=export_var_str['SINGULARITY_BIND'], debug=False)
# job_ids[28] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=gpuq,
# wait_for=list(itemgetter(*[21, 23, 24, 25, 26, 27, 28, 44, 45, 46, 47, 48, 50])(job_ids)),
# array_task=True, coprocessor='cuda', coprocessor_class=None, coprocessor_multi="1", threads=1, export_var=[export_var_str['SINGULARITY_BIND']])
print('submitted: ' + task_name)
# Averaging warps
@@ -1435,10 +1396,8 @@ if __name__ == "__main__":
jobcmd = func_to_cmd(averageImages, args=(warp_paths, avg_warp_path, 'average', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[29] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq,
# wait_for=[job_ids[28]])
job_ids[29] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[28]],
                        array_task=False)
job_ids[29] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[28]])
# job_ids[29] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[28]], array_task=False)
print('submitted: ' + task_name)
# Inverse average warp
@@ -1448,10 +1407,8 @@ if __name__ == "__main__":
inv_avg_warp_path = tree.get('inv_avg_warp')
cmd = invwarp(warp=avg_warp_path, ref=img_ref_T1head_path, out=inv_avg_warp_path, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
# job_ids[30] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq,
# wait_for=[job_ids[29]])
job_ids[30] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[29]],
                        array_task=False)
job_ids[30] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq, wait_for=[job_ids[29]])
# job_ids[30] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[29]], array_task=False)
print('submitted: ' + task_name)
# Create unbiased warps: (1) resample forward warp with inverse average warp and (2) add inverse average warp to resulting composition
@@ -1468,10 +1425,8 @@ if __name__ == "__main__":
warp_path, img_ref_T1head_path, resampled_path, inv_avg_warp_path, 'spline', False),
tmp_dir=script_dir, kwargs=None, clean="never")
f.write(jobcmd + '\n')
# job_ids[31] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[30]])
job_ids[31] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[30]],
array_task=True)
job_ids[31] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[30]])
# job_ids[31] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[30]], array_task=True)
print('submitted: ' + task_name)
task_count += 1
@@ -1482,10 +1437,8 @@ if __name__ == "__main__":
tree = tree.update(sub_id=id, step_id='{:02d}'.format(step), it_id='{:02d}'.format(it))
f.write('fslmaths ' + tree.get('mmorf_warp_resampled') + ' -add ' + tree.get(
'inv_avg_warp') + ' ' + tree.get('mmorf_warp_resampled_unbiased') + '\n')
# job_ids[32] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[31]])
job_ids[32] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[31]],
array_task=True)
job_ids[32] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[31]])
# job_ids[32] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[31]], array_task=True)
print('submitted: ' + task_name)
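Steps (1) and (2) compose each subject's forward warp with the inverse average warp: the forward displacement field is resampled through the inverse average warp, then the inverse average displacements are added. A numpy/scipy sketch of that composition, assuming displacement fields in voxel coordinates (not the pipeline's actual code):

    import numpy as np
    from scipy.ndimage import map_coordinates

    def compose_displacements(d_fwd, d_inv):
        # Returns d with x + d(x) ~= (x + d_inv(x)) + d_fwd(x + d_inv(x)):
        # step (1) resamples d_fwd at the points x + d_inv(x),
        # step (2) adds the inverse average displacements voxel-wise.
        shape = d_inv.shape[:3]
        grid = np.stack(np.meshgrid(*[np.arange(s) for s in shape], indexing='ij'), axis=-1)
        coords = np.moveaxis(grid + d_inv, -1, 0)  # sample points x + d_inv(x)
        d_resampled = np.stack([map_coordinates(d_fwd[..., i], coords, order=1)
                                for i in range(3)], axis=-1)
        return d_resampled + d_inv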
# Concatenate corresponding affine transforms and unbiased warps
@@ -1503,10 +1456,8 @@ if __name__ == "__main__":
warp1=warp_path, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[33] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[32]])
job_ids[33] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[32]],
array_task=True)
job_ids[33] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[32]])
# job_ids[33] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[32]], array_task=True)
print('submitted: ' + task_name)
# T1 head
@@ -1524,10 +1475,8 @@ if __name__ == "__main__":
warp1=warp_path, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
f.write(cmd)
# job_ids[34] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq,
# wait_for=[job_ids[32]])
job_ids[34] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq,
wait_for=[job_ids[32]], array_task=True)
job_ids[34] = submitJob(tag+'_'+task_name, log_dir, script=script_path, queue=cpuq, wait_for=[job_ids[32]])
# job_ids[34] = submitJob(script_path, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[32]], array_task=True)
print('submitted: ' + task_name)
# T2 head
@@ -1545,10 +1494,8 @@ if __name__ == "__main__":