Commit f3cad559 authored by Christoph Arthofer's avatar Christoph Arthofer
Browse files

Added intersectional template masking and different clamping for last iteration

parent a07ee75a
......@@ -378,7 +378,7 @@ def soft_clamp(x, k):
[lambda x: 0, lambda x: x, lambda x: k / (2 * (1 + np.exp(-8 * (x - k) / k))) + 0.75 * k])
def clampImage(img_path, out_path):
def run_soft_clamp(img_path, out_path):
"""! Performs preprocessing steps and clamping on an image.
@param img_path: Path to input image
......@@ -400,6 +400,41 @@ def clampImage(img_path, out_path):
img_clamped_nib = nib.Nifti1Image(img_clamped_np, affine=img_nib.affine, header=img_nib.header)
img_clamped_nib.to_filename(out_path)
def hard_clamp(x, k):
    """! Adapted from soft_clamp by FJ Lange; piecewise function for hard intensity clamping of T1 images.
    Takes a single parameter k which defines the transition to the clamping part of the function.
    f(x) = 0 | x <= 0
    f(x) = x | 0 < x <= k
    f(x) = k | x > k
    @param x: Image as numpy array
    @param k: Defines the transition to the clamping part of the function
    Date: 08/12/2021
    Author: C Arthofer
    Copyright: FMRIB 2021
    """
    # Name the three regions explicitly; np.piecewise accepts plain scalars
    # in the function list for the constant segments.
    below_zero = x <= 0
    above_clamp = x > k
    in_range = ~below_zero & ~above_clamp
    return np.piecewise(x,
                        [below_zero, in_range, above_clamp],
                        [0, lambda v: v, k])
def run_hard_clamp(img_path, out_path):
    """! Performs preprocessing steps and clamping on an image.
    @param img_path: Path to input image
    @param out_path: Path to clamped output image
    """
    src_nib = nib.load(img_path)
    # fslstats -r reports (robust min, robust max); only the max is needed as the clamp level
    _, robust_max = fslstats(src_nib).r.run()
    clamped_data = hard_clamp(src_nib.get_fdata(), robust_max)
    clamped_nib = nib.Nifti1Image(clamped_data, affine=src_nib.affine, header=src_nib.header)
    clamped_nib.to_filename(out_path)
def maskPreprocessing(tree):
    """! Builds registration weighting masks in T1 space from the brain and lesion masks.
    @param tree: FileTree supplying the input mask paths and output locations
    """
    # Binarise the brain mask at 0.1, then weight brain voxels 8 vs 1 for background
    # (bin*7+1); inm(1) rescales intensities afterwards — presumably so the mean
    # weight is 1 across the volume (fslmaths -inm semantics; TODO confirm).
    fslmaths(tree.get('data/T1_brain_mask')).thr(0.1).bin().mul(7).add(1).inm(1).run(tree.get('T1_weighted_brain_mask'))
    # Invert the lesion mask (binv) and multiply into the weighted brain mask so
    # lesion voxels receive zero weight in the combined mask.
    fslmaths(tree.get('data/lesion_mask_in_T1')).binv().mul(tree.get('T1_weighted_brain_mask')).run(tree.get('inverse_lesion_and_brain_mask_in_T1'))
......@@ -447,6 +482,23 @@ def averageImages(img_paths, out_path, mod='mean', norm_bool=False):
median_img = np.median(np.array(images),axis=0)
median_nib = nib.Nifti1Image(np.squeeze(median_img), affine=img_nib.affine, header=img_nib.header)
median_nib.to_filename(out_path)
elif mod == 'intersection':
for i, img_path in enumerate(img_paths):
if os.path.exists(img_path):
n_exist += 1
print(i, ' ', img_path)
img_nib = nib.load(img_path)
if norm_bool:
img_nib = fslmaths(img_nib).inm(1000).run()
if i == 0:
sum_img = img_nib
else:
sum_img = fslmaths(sum_img).add(img_nib).run()
else:
print(i, ' ', img_path, ' does not exist!')
if n_exist > 0:
fslmaths(sum_img).div(n_exist).thr(1).run(out_path)
assert n_exist == n_imgs, "Not all images available!"
......@@ -468,7 +520,7 @@ def applyWarpWrapper(img_path, ref_path, warped_path, warp_path, interp='spline'
img_nib = nib.load(img_path)
if norm_bool:
img_nib = fslmaths(img_nib).inm(1000).run()
print('applywarp(src=img_nib,ref=ref_path,out=warped_path,warp=warp_path,interp=interp)')
print('applywarp(src=img_nib, ref='+ref_path+',out='+warped_path+',warp='+warp_path+',interp='+interp+')')
applywarp(src=img_nib, ref=ref_path, out=warped_path, warp=warp_path, interp=interp)
......@@ -903,12 +955,18 @@ if __name__ == "__main__":
tree = tree.update(sub_id=id)
T1_head_path = tree.get(mod['T1_head_key'])
T1_clamped_path = tree.get('T1_head_clamped', make_dir=True)
T1_clamped_hard_path = tree.get('T1_head_clamped_hard')
jobcmd = func_to_cmd(clampImage,
jobcmd = func_to_cmd(run_soft_clamp,
args=(T1_head_path, T1_clamped_path),
tmp_dir=script_dir,
kwargs=None,
clean="never") + '\n'
jobcmd += func_to_cmd(run_hard_clamp,
args=(T1_head_path, T1_clamped_hard_path),
tmp_dir=script_dir,
kwargs=None,
clean="never") + '\n'
jobcmd += func_to_cmd(maskPreprocessing,
args=(tree,),
tmp_dir=script_dir,
......@@ -1471,7 +1529,7 @@ if __name__ == "__main__":
cmd = invwarp(warp=avg_warp_path, ref=img_ref_T1head_path, out=inv_avg_warp_path, cmdonly=True)
cmd = ' '.join(cmd) + '\n'
# job_ids[30] = submitJob(tag+'_'+task_name, log_dir, command=cmd, queue=cpuq, wait_for=[job_ids[29]])
job_ids[30] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq,wait_for=[job_ids[29]], array_task=False, jobram=jobram_low, jobtime=jobtime_low)
job_ids[30] = submitJob(cmd, tag + '_' + task_name, log_dir, queue=cpuq,wait_for=[job_ids[29]], array_task=False, jobram=jobram_low, jobtime=jobtime_high)
print('submitted: ' + task_name)
# Create unbiased warps: (1) resample forward warp with inverse average warp and (2) add inverse average warp to resulting composition
......@@ -1624,7 +1682,8 @@ if __name__ == "__main__":
task_name = '{:03d}_nlnT_warp_T1_head'.format(task_count)
script_path = os.path.join(script_dir, task_name + '.sh')
if step == step_id[-1] and it == it_at_step_id[-1]:
T1_head_key_temp = mod['T1_head_key']
# T1_head_key_temp = mod['T1_head_key']
T1_head_key_temp = 'T1_head_clamped_hard'
else:
T1_head_key_temp = 'T1_head_clamped'
with open(script_path, 'w+') as f:
......@@ -1795,7 +1854,7 @@ if __name__ == "__main__":
img_paths.append(tree.get('warped_T1brain_mask'))
nln_template_path = tree.get('T1_brain_mask_nln_template')
jobcmd = func_to_cmd(averageImages, args=(img_paths, nln_template_path, 'mean', False), tmp_dir=script_dir,
jobcmd = func_to_cmd(averageImages, args=(img_paths, nln_template_path, 'intersection', False), tmp_dir=script_dir,
kwargs=None, clean="never")
# job_ids[49] = submitJob(tag+'_'+task_name, log_dir, command=jobcmd, queue=cpuq, wait_for=[job_ids[38]])
job_ids[49] = submitJob(jobcmd, tag + '_' + task_name, log_dir, queue=cpuq, wait_for=[job_ids[38]], array_task=False, jobram=jobram_low, jobtime=jobtime_low)
......
......@@ -10,6 +10,7 @@ ext_txt=.txt
ident{ext_mat} (identity_mat)
{sub_id}
{sub_id}_T1_clamped{ext_nii} (T1_head_clamped)
{sub_id}_T1_clamped_hard{ext_nii} (T1_head_clamped_hard)
{sub_id}_T1_weighted_brain_mask{ext_nii} (T1_weighted_brain_mask)
{sub_id}_T2Lesion_in_T1_inverse_and_brain_mask{ext_nii} (inverse_lesion_and_brain_mask_in_T1)
log (log_dir)
......
......@@ -3,7 +3,7 @@ ext_mat=.mat
{sub_id}
anat
T1_orig{ext_nii} (T1_head)
T1_brain{ext_nii} (T1_brain)
T1_brain_mask{ext_nii} (T1_brain_mask)
T1_biascorr_normalised_nogm{ext_nii} (T1_head)
T1_biascorr_normalised_nogm_brain{ext_nii} (T1_brain)
T1_biascorr_brain_mask{ext_nii} (T1_brain_mask)
T2Lesion_in_T1{ext_nii} (lesion_mask_in_T1)
#!/bin/bash
# SLURM batch script: warps a bias-corrected T1Gd image into template space
# with FSL applywarp, using a precomputed unbiased nonlinear warp and spline
# interpolation. Paths are hard-coded for a single ad-hoc test run.
#SBATCH --partition=long
#SBATCH -o /data/users/u7pt4u/out.out
#SBATCH -e /data/users/u7pt4u/error.err
#SBATCH --time=05:00
applywarp -i /data/ms/processed/mri/Jacobian/fsl/sub-CFTY720D2301x0101x00002/ses-V9x20070615/anat/T1Gd_biascorr_normalised_nogm.nii.gz -o /data/users/u7pt4u/test_output.nii.gz -r /data/ms/processed/mri/Jacobian/fsl/median_hybrid/CFTY720D2301/sub-CFTY720D2301x0101x00002/SST_FSL/nln_step_05/iteration_01/nln_template_T1_head_05_01.nii.gz -w /data/ms/processed/mri/Jacobian/fsl/median_hybrid/CFTY720D2301/sub-CFTY720D2301x0101x00002/SST_FSL/nln_step_06/iteration_01/ses-V9x20070615_to_template_warp_resampled_unbiased_full_T1_head.nii.gz --interp=spline
\ No newline at end of file
#!/bin/bash
# SLURM batch script: runs the test2.py fslpy applywarp check on a compute node
# (companion to the direct command-line applywarp test script).
#SBATCH --partition=long
#SBATCH -o /data/users/u7pt4u/out.out
#SBATCH -e /data/users/u7pt4u/error.err
#SBATCH --time=05:00
python /data/users/u7pt4u/ms-templates/code/test2.py
\ No newline at end of file
# Ad-hoc test script: applies a precomputed nonlinear warp to a T1 image via
# fslpy's applywarp wrapper, passing an in-memory nibabel image as the source
# (rather than a file path) to check that this calling style works.
from fsl.wrappers.fnirt import invwarp, applywarp, convertwarp
import nibabel as nib
# Load the source image as a nibabel object; hard-coded path for this one-off test.
img_nib = nib.load('/data/ms/processed/mri/Jacobian/fsl/sub-CFTY720D2301x0101x00002/ses-V9x20070615/anat/T1_biascorr_normalised_nogm.nii.gz')
applywarp(src=img_nib, ref="/data/users/u7pt4u/ms-templates/data/processed/sub-CFTY720D2301x0101x00002/nln_step_06/iteration_01/nln_template_T1_head_06_01.nii.gz", out="/data/users/u7pt4u/ms-templates/data/processed/sub-CFTY720D2301x0101x00002/test_output_fslpy_submit_node.nii.gz", warp="/data/users/u7pt4u/ms-templates/data/processed/sub-CFTY720D2301x0101x00002/nln_step_07/iteration_01/ses-V9x20070615_to_template_warp_resampled_unbiased_full_T1_head.nii.gz", interp="spline")
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment