Commit ab1ded79 authored by Matteo Bastiani's avatar Matteo Bastiani

First commit

parents
# Compiled source #
###################
*.com
*.class
*.dll
*.exe
*.o
*.so
# Packages #
############
# it's better to unpack these files and commit the raw source
# git has its own built in compression methods
*.7z
*.dmg
*.gz
*.iso
*.jar
*.rar
*.tar
*.zip
# Logs and databases #
######################
*.log
*.sql
*.sqlite
# OS generated files #
######################
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
-----------------------------------------------
dHCP neonatal dMRI data processing pipeline
March, 2018
V 0.0.1: Pipeline reflecting the first data release processing
-----------------------------------------------
Comprehensive and automated pipeline to consistently analyse neonatal dMRI data from the developing Human Connectome Project (dHCP).
If you use the pipeline in your work, please cite the following article:
Bastiani, M., Andersson, J., Cordero-Grande, L., Murgasova, M., Hutter, J., Price, A.N., Makropoulos, A., Fitzgibbon, S.P., Hughes, E., Rueckert, D., Victor, S., Rutherford, M., Edwards, A.D., Smith, S., Tournier, J.-D., Hajnal, J.V., Jbabdi, S., Sotiropoulos, S.N. (2018). Automated processing pipeline for neonatal diffusion MRI in the developing Human Connectome Project. NeuroImage.
Installation
------------
The pipeline consists of several bash scripts.
Once it has been downloaded, the first step is to set the correct paths in:
dHCP_neo_dMRI_setup.sh
The script needs to be run before launching the processing jobs.
-------------------------------------------
Examples
-------------------------------------------
To launch the pipeline for a single subject, use the following command:
${scriptsFolder}/dHCP_neo_dMRI_setJobs.sh ${rawDataFolder}/sub-${cid} ses-${no} ${rawDataFile} ${cid} ${scriptsFolder}/dhcp300_f.txt ${outFolder} ${age} ${birth} ${n_sessions}
This command will submit all the necessary scripts to process the raw dMRI data. The necessary inputs are:
${rawDataFolder}/sub-${cid}: Path to raw data
ses-${no}: Session number
${rawDataFile}: Raw data file name
${cid}: Connectome ID
${scriptsFolder}/dhcp300_f.txt: Protocol file
${outFolder}: Output folder
${age}: Age at scan (in rounded weeks)
${birth}: Age at birth (in rounded weeks)
${n_sessions}: Total number of scanning sessions for the same subject
-------------------------------------------
Non-dHCP data
-------------------------------------------
To convert a locally acquired dataset such that it can be used with the pipeline, use the command:
${scriptsFolder}/utils/getProtocol
Typing the command in a terminal will display the required inputs.
The script will generate a raw data file and a protocol file that can be used with the pipeline.
#!/bin/bash
set -e

echo -e "\n START: Importing files"

# scriptsFolder=/home/fs0/matteob/scripts/dHCP

# All six positional arguments are mandatory: if the last one is missing,
# print the usage message and abort.
if [ "$6" == "" ]; then
    echo ""
    echo "usage: $0 <Data folder> <Data filename> <Output preprocessing folder> <Acquisition protocol> <Number of volumes> <Number of b0s>"
    echo " Data folder: Path to the data file"
    echo " Data filename: File name for the raw data"
    echo " Output preprocessing folder: Path to the output pre-processing folder"
    echo " Acquisition protocol: Text file with acquisition parameters"
    echo " Number of volumes: Number of acquired volumes"
    echo " Number of b0s: Number of b0s from each phase encoding block to use when estimating the fieldmap"
    echo ""
    exit 1
fi

# Positional arguments
dataFolder=$1   # Path to data file
dataFile=$2    # Data file name
prepFolder=$3  # Output folder
acqProt=$4     # Acquisition protocol
nVols=$5       # Number of acquired volumes
noB0s=$6       # Number of B0 volumes for each PE direction used to estimate distortions with TOPUP
#============================================================================
# Separate b0 volumes according to their PE direction and obtain
# bvals and bvecs files. Round bvals to identify shells.
#============================================================================
# idx* count b0 volumes per PE direction; no_* count diffusion-weighted (b>0)
# volumes per PE direction; idxb* count volumes per b-value shell.
idxLR=0
idxRL=0
idxAP=0
idxPA=0
# *vols start as the scalar sentinel -1 ("no b0 seen for this PE direction").
# When a b0 is found, the variable is used as an array of volume indices and
# element 0 overwrites the sentinel.
LRvols=-1
RLvols=-1
APvols=-1
PAvols=-1
no_LR=0
no_RL=0
no_AP=0
no_PA=0
idxb400=0
idxb1000=0
idxb2600=0
i=0
# Truncate the eddy index file; it will receive one PE-direction code per volume.
echo -n > ${prepFolder}/eddy/eddyIndex.txt
# Protocol file columns: 1-3 gradient direction, 4 b-value,
# 5 PE-direction code (1=LR, 2=RL, 3=AP, 4=PA), 6 readout time.
while read line; do
bvec_x[i]=`echo $line | awk {'print $1'}`
bvec_y[i]=`echo $line | awk {'print $2'}`
bvec_z[i]=`echo $line | awk {'print $3'}`
bval[i]=`echo $line | awk {'print $4'}`
pedir[i]=`echo $line | awk {'print $5'}`
rotime[i]=`echo $line | awk {'print $6'}`
echo -n "${pedir[i]} " >> ${prepFolder}/eddy/eddyIndex.txt
if [ ${bval[i]} -eq 0 ]; then
# b0 volume: remember its volume index for the matching PE direction.
if [ ${pedir[i]} -eq 1 ]; then #LR
LRvols[idxLR]=$i
idxLR=$(($idxLR + 1))
elif [ ${pedir[i]} -eq 2 ]; then #RL
RLvols[idxRL]=$i
idxRL=$(($idxRL + 1))
elif [ ${pedir[i]} -eq 3 ]; then #AP
APvols[idxAP]=$i
idxAP=$(($idxAP + 1))
elif [ ${pedir[i]} -eq 4 ]; then #PA
PAvols[idxPA]=$i
idxPA=$(($idxPA + 1))
fi
else
# Diffusion-weighted volume: count per PE direction and per shell.
if [ ${pedir[i]} -eq 1 ]; then #LR
no_LR=$(($no_LR + 1))
elif [ ${pedir[i]} -eq 2 ]; then #RL
no_RL=$(($no_RL + 1))
elif [ ${pedir[i]} -eq 3 ]; then #AP
no_AP=$(($no_AP + 1))
elif [ ${pedir[i]} -eq 4 ]; then #PA
no_PA=$(($no_PA + 1))
fi
if [ ${bval[i]} -eq 400 ]; then
idxb400=$(($idxb400+1))
elif [ ${bval[i]} -eq 1000 ]; then
idxb1000=$(($idxb1000+1))
elif [ ${bval[i]} -eq 2600 ]; then
idxb2600=$(($idxb2600+1))
fi
fi
i=$(($i + 1))
# Stop after nVols lines -- presumably the protocol file can list more lines
# than were actually acquired; confirm against getProtocol.
if [ $i -eq ${nVols} ]; then
break
fi
done < ${acqProt}
#============================================================================
# Write a JSON summary (volume counts per shell / PE direction) for QC.
#============================================================================
{
    echo "{"
    echo " \"no_B400_vols\": $idxb400,"
    echo " \"no_B1000_vols\": $idxb1000,"
    echo " \"no_B2600_vols\": $idxb2600,"
    echo " \"no_LR_vols\": $no_LR,"
    echo " \"no_RL_vols\": $no_RL,"
    echo " \"no_AP_vols\": $no_AP,"
    echo " \"no_PA_vols\": $no_PA"
    echo "}"
} > ${prepFolder}/dataImport.json

#============================================================================
# Write bvecs, bvals and eddy acquisition parameters file.
#============================================================================
{
    echo "${bvec_x[@]}"
    echo "${bvec_y[@]}"
    echo "${bvec_z[@]}"
} > ${prepFolder}/tmpData/bvecs
echo "${bval[@]}" > ${prepFolder}/bvals

# One acqparams row per PE direction (order: LR, RL, AP, PA). If a direction
# has acquired b0s, use the readout time of its first b0; otherwise write a
# default of 0.05 so the file always has four rows.
{
    if [ $LRvols -ne -1 ]; then
        echo -1 0 0 ${rotime[${LRvols[0]}]}
    else
        echo -1 0 0 0.05
    fi
    if [ $RLvols -ne -1 ]; then
        echo 1 0 0 ${rotime[${RLvols[0]}]}
    else
        echo 1 0 0 0.05
    fi
    if [ $APvols -ne -1 ]; then
        echo 0 -1 0 ${rotime[${APvols[0]}]}
    else
        echo 0 -1 0 0.05
    fi
    if [ $PAvols -ne -1 ]; then
        echo 0 1 0 ${rotime[${PAvols[0]}]}
    else
        echo 0 1 0 0.05
    fi
} > ${prepFolder}/eddy/acqparamsUnwarp.txt
#============================================================================
# Reorient raw data and bvecs to standard orientations
#============================================================================
${FSLDIR}/bin/fslreorient2std ${dataFolder}/${dataFile} ${prepFolder}/tmpData/data
# Without an output image, fslreorient2std prints the reorientation matrix.
${FSLDIR}/bin/fslreorient2std ${dataFolder}/${dataFile} > ${prepFolder}/tmpData/raw2std.mat
# 4x4 affine that flips the Z axis.
printf '%s\n' "1 0 0 0" "0 1 0 0" "0 0 -1 0" "0 0 0 1" > ${prepFolder}/tmpData/flipZ.mat
# Rotate gradient directions to match: first into standard orientation,
# then apply the Z flip.
${scriptsFolder}/utils/rotateBvecs.sh ${prepFolder}/tmpData/bvecs ${prepFolder}/tmpData/raw2std.mat ${prepFolder}/tmpData/bvecs2
${scriptsFolder}/utils/rotateBvecs.sh ${prepFolder}/tmpData/bvecs2 ${prepFolder}/tmpData/flipZ.mat ${prepFolder}/bvecs

#============================================================================
# TOPUP needs even in-plane matrix sizes: when a dimension is odd, pad it by
# duplicating the first slice along that axis.
#============================================================================
dimt1=$(${FSLDIR}/bin/fslval ${prepFolder}/tmpData/data dim1)
if [ $((dimt1 % 2)) -ne 0 ]; then
    ${FSLDIR}/bin/fslroi ${prepFolder}/tmpData/data ${prepFolder}/tmpData/tmp 0 1 0 -1 0 -1 0 -1
    ${FSLDIR}/bin/fslmerge -x ${prepFolder}/tmpData/data ${prepFolder}/tmpData/data ${prepFolder}/tmpData/tmp
fi
dimt2=$(${FSLDIR}/bin/fslval ${prepFolder}/tmpData/data dim2)
if [ $((dimt2 % 2)) -ne 0 ]; then
    ${FSLDIR}/bin/fslroi ${prepFolder}/tmpData/data ${prepFolder}/tmpData/tmp 0 -1 0 1 0 -1 0 -1
    ${FSLDIR}/bin/fslmerge -y ${prepFolder}/tmpData/data ${prepFolder}/tmpData/data ${prepFolder}/tmpData/tmp
fi
#============================================================================
# Extract the b0 volumes of every acquired PE direction and score them to
# identify the best ones.
#============================================================================
{
    echo "${LRvols[@]}"
    echo "${RLvols[@]}"
    echo "${APvols[@]}"
    echo "${PAvols[@]}"
} > ${prepFolder}/topup/b0Indices
# For every PE direction that has at least one b0 (sentinel -1 means none),
# pull out those volumes and let pickBestB0s.sh rank them. The second argument
# to pickBestB0s.sh is the 0-based PE code.
if [ $LRvols -ne -1 ]; then
    vols=$(IFS=','; echo "${LRvols[*]}")
    ${FSLDIR}/bin/fslselectvols -i ${prepFolder}/tmpData/data -o ${prepFolder}/topup/LR_B0s --vols=${vols}
    $scriptsFolder/utils/pickBestB0s.sh ${prepFolder}/topup 0 ${noB0s}
fi
if [ $RLvols -ne -1 ]; then
    vols=$(IFS=','; echo "${RLvols[*]}")
    ${FSLDIR}/bin/fslselectvols -i ${prepFolder}/tmpData/data -o ${prepFolder}/topup/RL_B0s --vols=${vols}
    $scriptsFolder/utils/pickBestB0s.sh ${prepFolder}/topup 1 ${noB0s}
fi
if [ $APvols -ne -1 ]; then
    vols=$(IFS=','; echo "${APvols[*]}")
    ${FSLDIR}/bin/fslselectvols -i ${prepFolder}/tmpData/data -o ${prepFolder}/topup/AP_B0s --vols=${vols}
    $scriptsFolder/utils/pickBestB0s.sh ${prepFolder}/topup 2 ${noB0s}
fi
if [ $PAvols -ne -1 ]; then
    vols=$(IFS=','; echo "${PAvols[*]}")
    ${FSLDIR}/bin/fslselectvols -i ${prepFolder}/tmpData/data -o ${prepFolder}/topup/PA_B0s --vols=${vols}
    $scriptsFolder/utils/pickBestB0s.sh ${prepFolder}/topup 3 ${noB0s}
fi
#============================================================================
# Sort b0s based on their scores, merge them in a file and write acqparams
# for topup and reference scan for eddy.
#============================================================================
# idxBestB0s.txt rows appear to be "<PE code> <volume index> <score>"
# (written by pickBestB0s.sh -- confirm there). Sort numerically by score
# (column 3) and keep at most the 4 best rows.
bestPE=(`cat ${prepFolder}/topup/idxBestB0s.txt | sort -k 3 -n | head -n 4 | awk '{print $1}'`)
bestVol=(`cat ${prepFolder}/topup/idxBestB0s.txt | sort -k 3 -n | head -n 4 | awk '{print $2}'`)
# Topup phase-encode vectors indexed by 0-based PE code: 0=LR, 1=RL, 2=AP, 3=PA.
acqpPE=("-1 0 0" "1 0 0" "0 -1 0" "0 1 0")
PE=("LR" "RL" "AP" "PA")
count=0
echo -n > ${prepFolder}/topup/acqparams.txt
for i in "${bestPE[@]}" ; do
# One acqparams row per volume in this direction's *_B0s file, all rows using
# the readout time of the direction's best volume.
dimt4=`${FSLDIR}/bin/fslval ${prepFolder}/topup/"${PE[$i]}"_B0s.nii.gz dim4`
for j in $(seq 0 $((${dimt4}-1))) ; do
echo "${acqpPE[$i]}" "${rotime[${bestVol[${count}]}]}" >> ${prepFolder}/topup/acqparams.txt
done
# Collect the per-direction b0 files in best-first order for merging.
PE_B0s[$count]=${prepFolder}/topup/"${PE[$i]}"_B0s.nii.gz
count=$(($count+1))
done
echo ${PE_B0s[@]}
# Merge the selected b0s into the 4D "phase" image used by topup; the overall
# best volume index becomes eddy's reference scan.
${FSLDIR}/bin/fslmerge -t ${prepFolder}/topup/phase ${PE_B0s[@]}
echo "${bestVol[0]}" > ${prepFolder}/eddy/ref_scan.txt
echo -e "\n END: Importing files"
#!/bin/bash
set -e

printf '\n START: running BPX model 3...\n'

# Input: diffusion folder in FSL layout (data, bvals, bvecs, nodif_brain_mask).
diffFolder=$1

#============================================================================
# Run bedpostx with model 3, -n 3 (presumably fibres per voxel -- see the
# bedpostx docs), -b 3000 and --Rmean=0.3.
#============================================================================
${FSLDIR}/bin/bedpostx ${diffFolder} -n 3 -b 3000 -model 3 --Rmean=0.3

printf '\n END: runBPX\n'
#!/bin/bash
set -e

printf '\n START: runEddy\n'

prepFolder=$1   # Pre-processing folder (expects topup/ and eddy/ subfolders)
dataFile=$2     # 4D image passed to eddy as --imain

topupFolder=${prepFolder}/topup
eddyFolder=${prepFolder}/eddy

# Reference volume index selected during import.
ref_scan=$(cat ${eddyFolder}/ref_scan.txt)

# Slice-to-volume, outlier-replacing eddy-current correction (GPU build).
eddyOpts=(
    --imain=${dataFile}
    --mask=${topupFolder}/nodif_brain_mask.nii.gz
    --index=${eddyFolder}/eddyIndex.txt
    --bvals=${prepFolder}/bvals
    --bvecs=${prepFolder}/bvecs
    --acqp=${eddyFolder}/acqparamsUnwarp.txt
    --topup=${topupFolder}/topup_results
    --out=${eddyFolder}/eddy_corrected
    --very_verbose
    --niter=5
    --fwhm=10,5,0,0,0
    --s2v_niter=10
    --mporder=8
    --nvoxhp=5000
    --slspec=${scriptsFolder}/slorder.txt
    --repol
    --ol_type=both
    --s2v_interp=trilinear
    --s2v_lambda=1
    --ref_scan_no=${ref_scan}
    --data_is_shelled
    --cnr_maps
    --residuals
    --dont_mask_output
)
${FSLDIR}/bin/eddy_cuda "${eddyOpts[@]}"

#============================================================================
# Brain-extract the mean b0 of the corrected data.
#============================================================================
echo "Running BET on the hifi b0"
${FSLDIR}/bin/select_dwi_vols ${eddyFolder}/eddy_corrected ${prepFolder}/bvals ${eddyFolder}/hifib0 0 -m
${FSLDIR}/bin/bet ${eddyFolder}/hifib0 ${eddyFolder}/nodif_brain -m -f 0.25 -R
#================
# Quality Control
#================
# i starts at -1 so the first line of eddy_outlier_map is skipped -- it is
# presumably a header row; confirm against the eddy output format.
i=-1
n_ol_b400=0
n_ol_b1000=0
n_ol_b2600=0
n_ol_LR=0
n_ol_RL=0
n_ol_AP=0
n_ol_PA=0
# Per-volume b-value and PE-direction code, written at import time.
bvals=($(head -n 1 ${prepFolder}/bvals))
eddyIndex=($(head -n 1 ${eddyFolder}/eddyIndex.txt))
dimt3=`${FSLDIR}/bin/fslval ${eddyFolder}/eddy_corrected.nii.gz dim3`
dimt4=`${FSLDIR}/bin/fslval ${eddyFolder}/eddy_corrected.nii.gz dim4`
# Compute outlier stats
# n_ol_sl: number of times slice n has been classified as an outlier
# n_ol_vol: number of outlier slices in each volume
# n_ol_b*: number of outlier slices for each b-value shell
# n_ol_*: number of outlier slices for each phase encode direction
# tot_ol: total number of outliers
while read line
do
if [ $i -ge 0 ]; then
#echo ${line}
# One row per volume: one 0/1 outlier flag per slice.
tmp=(${line})
a=0
# a = number of outlier slices in this volume; also accumulate per-slice totals.
for ((ii=0; ii<$dimt3; ii++)); do
a=$((${a}+${tmp[ii]}))
n_ol_sl[ii]=$((${n_ol_sl[ii]}+${tmp[ii]}))
done
n_ol_vol[i]=$a
# Accumulate per-shell outlier counts (b0 volumes match no branch).
if [ ${bvals[i]} -eq 400 ]; then
n_ol_b400=$((${n_ol_b400}+${a}))
elif [ ${bvals[i]} -eq 1000 ]; then
n_ol_b1000=$((${n_ol_b1000}+${a}))
elif [ ${bvals[i]} -eq 2600 ]; then
n_ol_b2600=$((${n_ol_b2600}+${a}))
fi
# Accumulate per-PE-direction counts (codes: 1=LR, 2=RL, 3=AP, 4=PA).
if [ ${eddyIndex[i]} -eq 1 ]; then
n_ol_LR=$((${n_ol_LR}+${a}))
elif [ ${eddyIndex[i]} -eq 2 ]; then
n_ol_RL=$((${n_ol_RL}+${a}))
elif [ ${eddyIndex[i]} -eq 3 ]; then
n_ol_AP=$((${n_ol_AP}+${a}))
elif [ ${eddyIndex[i]} -eq 4 ]; then
n_ol_PA=$((${n_ol_PA}+${a}))
fi
fi
i=$(($i + 1))
done < ${eddyFolder}/eddy_corrected.eddy_outlier_map
# Total outliers = sum over the three diffusion-weighted shells.
tot_ol=$((${n_ol_b400}+${n_ol_b1000}+${n_ol_b2600}))
# Average subject motion over all volumes, from eddy's movement RMS output.
# Column 1 = absolute RMS displacement, column 2 = RMS displacement relative
# to the previous volume.
m_abs=0
m_rel=0
while read line
do
    # Absolute displacement for this volume.
    val=$(echo $line | awk '{print $1}')
    # bc cannot parse scientific notation: rewrite e.g. "1.2e+03" as "1.2*10^03".
    val=$(echo ${val} | sed -e 's/[eE]+*/*10^/')
    m_abs=$(echo "${m_abs} + ${val}" | bc -l)
    # Relative displacement for this volume.
    val=$(echo $line | awk '{print $2}')
    val=$(echo ${val} | sed -e 's/[eE]+*/*10^/')
    m_rel=$(echo "${m_rel} + ${val}" | bc -l)
done < ${eddyFolder}/eddy_corrected.eddy_movement_rms
# Divide the sums by the number of volumes to get the averages.
m_abs=$(echo "${m_abs} / ${dimt4}" | bc -l)
m_rel=$(echo "${m_rel} / ${dimt4}" | bc -l)
# Summarise the QC metrics as JSON next to the eddy output.
ol_vols=$(printf ", %s" "${n_ol_vol[@]}")
ol_vols=${ol_vols:2}
ol_slices=$(printf ", %s" "${n_ol_sl[@]}")
ol_slices=${ol_slices:2}
{
    echo "{"
    echo " \"Tot_ol\": $tot_ol,"
    echo " \"No_ol_volumes\": [$ol_vols],"
    echo " \"No_ol_slices\": [$ol_slices],"
    echo " \"No_ol_b400\": $n_ol_b400,"
    echo " \"No_ol_b1000\": $n_ol_b1000,"
    echo " \"No_ol_b2600\": $n_ol_b2600,"
    echo " \"No_ol_LR\": $n_ol_LR,"
    echo " \"No_ol_RL\": $n_ol_RL,"
    echo " \"No_ol_AP\": $n_ol_AP,"
    echo " \"No_ol_PA\": $n_ol_PA,"
    echo " \"Avg_motion\": $m_abs"
    echo "}"
} > ${eddyFolder}/eddy_corrected.json

printf '\n END: runEddy\n'
#!/bin/bash
set -e

printf '\n START: runPostProc\n'

prepFolder=$1   # Pre-processing folder
diffFolder=$2   # Output Diffusion folder
qcFlag=$3       # 1 = additionally store per-shell volumes/variances for QC

topupFolder=${prepFolder}/topup
eddyFolder=${prepFolder}/eddy

mkdir -p ${diffFolder}
if [ "${qcFlag}" -eq 1 ]; then
    mkdir -p ${prepFolder}/QC
fi

#============================================================================
# Remove negative intensity values (caused by spline interpolation) from
# pre-processed data. Copy bvals and (rotated) bvecs to the Diffusion folder.
#============================================================================
${FSLDIR}/bin/fslmaths ${eddyFolder}/data_sr -thr 0 ${diffFolder}/data
rm ${eddyFolder}/data_sr.*
cp ${prepFolder}/bvals ${diffFolder}/bvals
cp ${eddyFolder}/eddy_corrected.eddy_rotated_bvecs ${diffFolder}/bvecs
#============================================================================
# Per shell: mean image, optional QC volumes/variances, then either a brain
# mask (b=0) or a per-shell tensor fit plus attenuation map.
#============================================================================
uniqueBvals=(0 400 1000 2600)
for bv in "${uniqueBvals[@]}"; do
    # Mean image across the volumes of this shell.
    ${FSLDIR}/bin/select_dwi_vols ${diffFolder}/data ${diffFolder}/bvals ${diffFolder}/mean_b${bv} ${bv} -m
    if [ "${qcFlag}" -eq 1 ]; then
        # Keep the raw shell volumes and their variance for the QC reports.
        ${FSLDIR}/bin/select_dwi_vols ${diffFolder}/data ${diffFolder}/bvals ${prepFolder}/QC/vols_b${bv} ${bv}
        ${FSLDIR}/bin/select_dwi_vols ${diffFolder}/data ${diffFolder}/bvals ${prepFolder}/QC/var_b${bv} ${bv} -v
    fi
    if [ "${bv}" -eq 0 ]; then
        # Brain mask from the mean b0.
        ${FSLDIR}/bin/bet ${diffFolder}/mean_b${bv} ${diffFolder}/nodif_brain -m -f 0.25 -R
    else
        echo "Multi-shell data: fitting DT to b=${bv} shell..."
        mkdir -p ${diffFolder}/dtifit_b${bv}
        # Extract b0 plus this shell (with matching bvecs/bvals) and fit the tensor.
        ${FSLDIR}/bin/select_dwi_vols ${diffFolder}/data ${diffFolder}/bvals ${diffFolder}/dtifit_b${bv}/b${bv} 0 -b ${bv} -obv ${diffFolder}/bvecs
        ${FSLDIR}/bin/dtifit -k ${diffFolder}/dtifit_b${bv}/b${bv} -o ${diffFolder}/dtifit_b${bv}/dti -m ${diffFolder}/nodif_brain_mask -r ${diffFolder}/dtifit_b${bv}/b${bv}.bvec -b ${diffFolder}/dtifit_b${bv}/b${bv}.bval --sse --save_tensor
        # Attenuation profile: mean shell signal over mean b0, inside the mask.
        ${FSLDIR}/bin/fslmaths ${diffFolder}/mean_b${bv} -div ${diffFolder}/mean_b0 -mul ${diffFolder}/nodif_brain_mask ${diffFolder}/att_b${bv}
    fi
    # Mask the mean shell image.
    ${FSLDIR}/bin/fslmaths ${diffFolder}/mean_b${bv} -mul ${diffFolder}/nodif_brain_mask ${diffFolder}/mean_b${bv}
done
#============================================================================
# Fit the diffusion kurtosis model to all shells at once.
#============================================================================
echo "Multi-shell data: fitting DK to all shells..."
mkdir -p ${diffFolder}/dkifit
${FSLDIR}/bin/dtifit -k ${diffFolder}/data -o ${diffFolder}/dkifit/dki -m ${diffFolder}/nodif_brain_mask -r ${diffFolder}/bvecs -b ${diffFolder}/bvals --sse --save_tensor --kurt --kurtdir
#rm -R ${preprocdir}/tmpData

printf '\n END: runPostProc\n'
This diff is collapsed.
#!/bin/bash
set -e

printf '\n Running topup...\n'

topupDir=$1   # Folder where input files are and where output will be stored
topupConfigFile=${FSLDIR}/etc/flirtsch/b02b0.cnf   # Standard FSL b0-to-b0 config

#============================================================================
# Estimate the off-resonance field from the selected b0 volumes.
#============================================================================
${FSLDIR}/bin/topup --imain=${topupDir}/phase --datain=${topupDir}/acqparams.txt --config=${topupConfigFile} --fout=${topupDir}/fieldmap --iout=${topupDir}/topup_b0s --out=${topupDir}/topup_results -v

#============================================================================
# Brain-extract the mean of the unwarped b0s.
#============================================================================
echo "Running BET on the hifi b0"
${FSLDIR}/bin/fslmaths ${topupDir}/topup_b0s -Tmean ${topupDir}/topup_hifib0
${FSLDIR}/bin/bet ${topupDir}/topup_hifib0 ${topupDir}/nodif_brain -m -f 0.25 -R

printf '\n END: runTopup.\n'
#!/bin/bash
set -e

echo -e "\n START: Setting up jobs for the dHCP neonatal dMRI data processing pipeline"

# All nine positional arguments are mandatory; show usage and abort otherwise.
# BUG FIX: the usage line used to advertise a "<readout time>" argument that
# is never parsed (only $1..$9 are read below); it has been removed so the
# usage matches the actual parameter list.
if [ "$9" == "" ]; then
    echo "The script will queue all the jobs necessary to process dHCP neonatal dMRI data"
    echo ""
    echo "usage: $0 <data folder> <session folder> <data file> <connectome ID> <acquisition protocol> <output folder> <age at scan> <age at birth> <no. sessions>"
    echo ""
    echo " data folder: path where the session data is stored"
    echo " session folder: session folder name"
    echo " data file: raw data file name"
    echo " connectome ID: connectome ID number (CC...)"
    echo " acquisition protocol: name of the text file containing gradient orientations, b-values and phase encoding directions"
    echo " output folder: folder where results will be stored"
    echo " age at scan: in weeks (GA)"
    echo " age at birth: in weeks (GA)"
    echo " no. sessions: number of sessions"
    echo ""
    echo ""
    exit 1
fi

# Positional arguments
dataFolder=$1      # Path to data folder
sessionFolder=$2   # Session number
dataFile=$3        # FileName to avoid confusion...
connID=$4          # Unique connectome ID
acqProt=$5         # Acquisition protocol file
outFolder=$6       # Path where the processed subjects will be stored
ageScan=$7         # Age at scan (w GA)
ageBirth=$8        # Age at birth (w GA)
nSessions=$9       # Number of sessions

noB0s=2   # Number of B0 volumes for each PE direction used to estimate distortions with TOPUP
#============================================================================
# Check for scan completeness
#============================================================================
dimt4=$(${FSLDIR}/bin/fslval ${dataFolder}/${sessionFolder}/DWI/${dataFile} dim4)
complete_check=${dimt4}   # acquired volume count, reported in initQC.json
usable_check=1
if [ $dimt4 -lt 34 ]; then
    echo "WARNING: The dataset is unusable as it does not contain enough B0 volumes"
    # BUG FIX: this used the undefined variable $ConnID (wrong capitalisation
    # of connID), so the folder was always named "unusable_"; use ${connID}.
    # NOTE(review): the base path is site-specific -- consider parameterizing.
    SubjFolder=/vols/Data/baby/DMRI_pipeline_testing/unusable_"${connID}"
    mkdir -p ${SubjFolder}
    usable_check=0
    exit 1
elif [ $dimt4 -lt 123 ]; then
    echo "WARNING: The dataset is incomplete and does not contain enough B0 pairs for each PE direction"
    # Fall back to a single b0 per PE direction for TOPUP.
    noB0s=1
    usable_check=1
fi
#============================================================================
# Create directory structure and write ages
#============================================================================
subjOutFolder=${outFolder}/${connID}/${sessionFolder}
prepFolder=${subjOutFolder}/PreProcessed
anatFolder=${subjOutFolder}/T2w
diffFolder=${subjOutFolder}/Diffusion
mkdir -p ${prepFolder}/topup ${prepFolder}/eddy ${prepFolder}/tmpData ${anatFolder} ${diffFolder}
# Ages are stored rounded to the nearest integer week.
awk -v v="$ageScan" 'BEGIN{printf "%.0f\n", v}' > ${subjOutFolder}/age
awk -v v="$ageBirth" 'BEGIN{printf "%.0f\n", v}' > ${subjOutFolder}/birth

#============================================================================
# Store QC information
#============================================================================
{
    echo "{"
    echo " \"Complete\": ${complete_check},"
    echo " \"Usable\": ${usable_check},"
    echo " \"nSessions\": ${nSessions},"
    echo " \"birthAge\": ${ageBirth},"
    echo " \"scanAge\": ${ageScan}"
    echo "}"
} > ${subjOutFolder}/initQC.json

#============================================================================
# Prepare super-resolution commands
#============================================================================
${scriptsFolder}/superres/setup_superres.sh ${prepFolder}/eddy/eddy_corrected.nii.gz ${prepFolder}/eddy/nodif_brain_mask.nii.gz ${prepFolder}/eddy/sr_commands.txt ${dimt4}
#============================================================================
# Set main chain starting from last completed step
#============================================================================
if [ ! -e ${prepFolder}/eddy/eddy_corrected.nii.gz ]; then
#============================================================================
# Import files and create initial structure
#============================================================================
importid=`${FSLDIR}/bin/fsl_sub -q short.q -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_importFiles.sh ${dataFolder}/${sessionFolder}/DWI ${dataFile} ${prepFolder} ${acqProt} ${dimt4} ${noB0s}`
#============================================================================
# Run TOPUP
#============================================================================
topupid=`${FSLDIR}/bin/fsl_sub -q short.q -j ${importid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runTopup.sh ${prepFolder}/topup`
#============================================================================
# EDDY run with outlier detection and replacemetn, s2v and no masking of out-of-FOV voxels
#============================================================================
eddyid=`${FSLDIR}/bin/fsl_sub -q cuda.q -j ${topupid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runEddy.sh ${prepFolder} ${prepFolder}/tmpData/data`
#============================================================================
# Super resolution for EDDY corrected data
#============================================================================
pre_srid=`${FSLDIR}/bin/fsl_sub -q long.q -j ${eddyid} -l ${prepFolder}/logs ${scriptsFolder}/superres/prerun_superres.sh ${prepFolder}/eddy/eddy_corrected.nii.gz`
srid=`${FSLDIR}/bin/fsl_sub -q long.q -j ${pre_srid} -l ${prepFolder}/logs -t ${prepFolder}/eddy/sr_commands.txt`
post_srid=`${FSLDIR}/bin/fsl_sub -q long.q -j ${srid} -l ${prepFolder}/logs ${scriptsFolder}/superres/postrun_superres.sh ${prepFolder}/eddy/eddy_corrected.nii.gz`
#============================================================================
# Run EDDY post processing steps and DT fit
#============================================================================
pprocid=`${FSLDIR}/bin/fsl_sub -q long.q -j ${post_srid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runPostProc.sh ${prepFolder} ${diffFolder} 1`
#============================================================================
# Run BPX model 3
#============================================================================
bpxid=`${FSLDIR}/bin/fsl_sub -q cuda.q -j ${pprocid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runBPX.sh ${diffFolder}`
#============================================================================
# Coregister DWI with Structural T2 (native and template spaces)
#============================================================================
regid=`${FSLDIR}/bin/fsl_sub -q long.q -j ${pprocid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runRegistration.sh ${anatFolder} ${subjOutFolder} ${t2Folder} ${segFolder} ${surfFolder} ${dofsFolder}`
#============================================================================
# Generate PNGs for QC reports
#============================================================================
pngid=`${FSLDIR}/bin/fsl_sub -q short.q -j ${regid} -l ${prepFolder}/logs ${scriptsFolder}/utils/generateFigs.sh ${subjOutFolder}`
else
#============================================================================
# Run EDDY post processing steps
#============================================================================
pprocid=`${FSLDIR}/bin/fsl_sub -q cuda.q -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runPostProc.sh ${prepFolder} ${diffFolder} 1`
#============================================================================
# Run BPX model 3
#============================================================================
bpxid=`${FSLDIR}/bin/fsl_sub -q cuda.q -j ${pprocid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runBPX.sh ${diffFolder}`
#============================================================================
# Coregister DWI with Structural T2 (native and template spaces)
#============================================================================
regid=`${FSLDIR}/bin/fsl_sub -q long.q -j ${pprocid} -l ${prepFolder}/logs ${scriptsFolder}/dHCP_neo_dMRI_runRegistration.sh ${anatFolder} ${subjOutFolder} ${t2Folder} ${segFolder} ${surfFolder} ${dofsFolder}`