Commit 1744662f authored by Amy Howard

Adding NODDI model to gitlab
#define MACRO template <typename T> __device__ inline
#define MPI 3.14159265358979323846
#define MN 7
// Legendre-Gaussian integrals L_n(x) for n = 0..6 (even Legendre orders 0 to 12)
MACRO void legendreGaussianIntegral(T x, T* L)
{
if(x>(T)0.05){
//exact
T I[MN];
T sqrtx = sqrt_gpu(x);
I[0] = sqrt_gpu((T)MPI)*erff(sqrtx)/sqrtx;
T dx = (T)1.0/x;
T emx = -exp_gpu(-x);
for (int i=1;i<MN;i++){
I[i] = emx + ((i+1)-(T)1.5)*I[i-1];
I[i] = I[i]*dx;
}
L[0] = I[0];
L[1] = -(T)0.5*I[0] + (T)1.5*I[1];
L[2] = (T)0.375*I[0] - (T)3.75*I[1] + (T)4.375*I[2];
L[3] = -(T)0.3125*I[0] + (T)6.5625*I[1] - (T)19.6875*I[2] + (T)14.4375*I[3];
L[4] = (T)0.2734375*I[0] - (T)9.84375*I[1] + (T)54.140625*I[2] - (T)93.84375*I[3] + (T)50.2734375*I[4];
L[5] = -((T)63.0/(T)256.0)*I[0] + ((T)3465.0/(T)256.0)*I[1] - ((T)30030.0/(T)256.0)*I[2] + ((T)90090.0/(T)256.0)*I[3] - ((T)109395.0/(T)256.0)*I[4] + ((T)46189.0/(T)256.0)*I[5];
L[6] = ((T)231.0/(T)1024.0)*I[0] - ((T)18018.0/(T)1024.0)*I[1] + ((T)225225.0/(T)1024.0)*I[2] - ((T)1021020.0/(T)1024.0)*I[3] + ((T)2078505.0/(T)1024.0)*I[4] - ((T)1939938.0/(T)1024.0)*I[5] + ((T)676039.0/(T)1024.0)*I[6];
}else{
// x<=0.05, approx
// Computing the legendre gaussian integrals for small x
T x2=x*x;
T x3=x2*x;
T x4=x3*x;
T x5=x4*x;
T x6=x5*x;
L[0] = (T)2.0 - (T)2.0*x/(T)3.0 + x2/(T)5.0 - x3/(T)21.0 + x4/(T)108.0;
L[1] = -(T)4.0*x/(T)15.0 + (T)4.0*x2/(T)35.0 - (T)2.0*x3/(T)63.0 + (T)2.0*x4/(T)297.0;
L[2] = (T)8.0*x2/(T)315.0 - (T)8.0*x3/(T)693.0 + (T)4.0*x4/(T)1287.0;
L[3] = -(T)16.0*x3/(T)9009.0 + (T)16.0*x4/(T)19305.0;
L[4] = (T)32.0*x4/(T)328185.0;
L[5] = -(T)64.0*x5/(T)14549535.0;
L[6] = (T)128.0*x6/(T)760543875.0;
}
}
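// Note on the two branches above (NODDI-toolbox convention assumed):
//   I_n(x) = \int_{-1}^{1} t^{2n} e^{-x t^2} dt,   I_0(x) = sqrt(pi)*erf(sqrt(x))/sqrt(x),
//   I_n(x) = ( (n - 1/2)*I_{n-1}(x) - e^{-x} ) / x            (integration by parts),
//   L_n(x) = \int_{-1}^{1} P_{2n}(t) e^{-x t^2} dt,
// so each L[n] is the even Legendre polynomial P_{2n} expanded in powers of t^2
// and applied to the I_k. The x <= 0.05 branch is the Taylor expansion of the
// same integrals, presumably because the exact recursion loses precision as x -> 0.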
MACRO T legendre_m0(int l, T x)
{
// From Numerical Recipes
// Computes the Legendre polynomial P_l(x), i.e. the associated Legendre
// function with m fixed to 0.
T pmm=(T)1.0;
if(l==0){
return pmm;
}else{
T pmmp1=x;
T pll=(T)0.0;
if (l==1){
return pmmp1;
}else{
for(int ll=2;ll<=l;ll++){
pll=(x*((T)2.0*ll-(T)1.0)*pmmp1-(ll-(T)1.0)*pmm)/((T)ll);
pmm=pmmp1;
pmmp1=pll;
}
return pll;
}
}
}
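// The loop above is Bonnet's recurrence for the m = 0 Legendre polynomials,
//   l*P_l(x) = (2l - 1)*x*P_{l-1}(x) - (l - 1)*P_{l-2}(x),
// started from P_0(x) = 1 and P_1(x) = x. A quick sanity check: P_l(1) = 1 for
// every l, so legendre_m0(l, (T)1.0) should return 1 for any order.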
MACRO void computeSH_values(T xv, T* SH)
{
SH[0] = sqrt_gpu(((T)1.0-(T)0.75)/(T)MPI);
SH[0]*= legendre_m0(0,xv); // l=0
SH[1] = sqrt_gpu(((T)2.0-(T)0.75)/(T)MPI);
SH[1]*= legendre_m0(2,xv); // l=2
SH[2] = sqrt_gpu(((T)3.0-(T)0.75)/(T)MPI);
SH[2]*= legendre_m0(4,xv); // l=4
SH[3] = sqrt_gpu(((T)4.0-(T)0.75)/(T)MPI);
SH[3]*= legendre_m0(6,xv); // l=6
SH[4] = sqrt_gpu(((T)5.0-(T)0.75)/(T)MPI);
SH[4]*= legendre_m0(8,xv); // l=8
SH[5] = sqrt_gpu(((T)6.0-(T)0.75)/(T)MPI);
SH[5]*= legendre_m0(10,xv); // l=10
SH[6] = sqrt_gpu(((T)7.0-(T)0.75)/(T)MPI);
SH[6]*= legendre_m0(12,xv); // l=12
}
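// Note on the constants above: for j = 0..6,
//   sqrt(((j+1) - 0.75)/pi) = sqrt((j + 1/4)/pi) = sqrt((2l + 1)/(4*pi)) with l = 2*j,
// which is the normalisation of the m = 0 spherical harmonic, so
// SH[j] = Y_{2j}^0 evaluated at cos(theta) = xv.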
#!/bin/sh
#
# Moises Hernandez-Fernandez - FMRIB Image Analysis Group
#
# Copyright (C) 2004 University of Oxford
#
# SHCOPYRIGHT
Usage() {
echo ""
echo "Usage: NODDI_Watson_finish <subject_directory>"
echo ""
echo "expects to find all the estimatedParameters in subject directory"
echo ""
exit 1
}
[ "$1" = "" ] && Usage
directory=$1
cd ${directory}
mv $directory/Param_0_samples.nii.gz $directory/fiso_samples.nii.gz
mv $directory/Param_1_samples.nii.gz $directory/fintra_samples.nii.gz
mv $directory/Param_2_samples.nii.gz $directory/kappa_samples.nii.gz
# Change range???
mv $directory/Param_3_samples.nii.gz $directory/th_samples.nii.gz
mv $directory/Param_4_samples.nii.gz $directory/ph_samples.nii.gz
Two_div_pi=0.636619772367581
$FSLDIR/bin/fslmaths $directory/fiso_samples.nii.gz -Tmean $directory/mean_fiso
$FSLDIR/bin/fslmaths $directory/fintra_samples.nii.gz -Tmean $directory/mean_fintra
$FSLDIR/bin/fslmaths $directory/kappa_samples.nii.gz -Tmean $directory/mean_kappa
$FSLDIR/bin/make_dyadic_vectors $directory/th_samples $directory/ph_samples $directory/nodif_brain_mask.nii.gz dyads1
${FSLDIR}/bin/fslmaths $directory/mean_kappa -recip -atan -mul $Two_div_pi $directory/OD
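# The fslmaths call above maps the mean Watson concentration to the NODDI
# orientation dispersion index: OD = (2/pi)*arctan(1/kappa), so large kappa
# (tightly clustered fibre orientations) gives OD near 0 and kappa -> 0 gives OD near 1.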
# For the post-mortem model there is an additional parameter (non-attenuating dot compartment)
if [ -f $directory/Param_5_samples.nii.gz ]; then
mv $directory/Param_5_samples.nii.gz $directory/irFrac_samples.nii.gz
$FSLDIR/bin/fslmaths $directory/irFrac_samples.nii.gz -Tmean $directory/mean_irFrac.nii.gz
fi
#!/bin/sh
#
# Moises Hernandez-Fernandez - FMRIB Image Analysis Group
#
# Copyright (C) 2004 University of Oxford
#
# SHCOPYRIGHT
#
# Pipeline for fitting NODDI-Watson
# Now with user-specified dax and diso - Amy Howard, Apr 2020
if [ "x$CUDIMOT" == "x" ]; then
echo ""
echo "Please, set enviroment variable CUDIMOT with the path where cuDIMOT is installed"
echo "The path must contain a bin directory with binary files, i.e. \$CUDIMOT/bin"
echo "For instance: export CUDIMOT=/home/moises/CUDIMOT"
echo ""
exit 1
fi
bindir=${CUDIMOT}/bin
make_absolute(){
dir=$1;
if [ -d ${dir} ]; then
OLDWD=`pwd`
cd ${dir}
dir_all=`pwd`
cd $OLDWD
else
dir_all=${dir}
fi
echo ${dir_all}
}
Usage() {
echo ""
echo "Usage: Pipeline_NODDI_Watson_diff.sh <subject_directory> [options]"
echo ""
echo "expects to find data and nodif_brain_mask in subject directory"
echo ""
echo "<options>:"
echo "--dax (axial diffusivity in m^2/s e.g. 0.0017)"
echo "--diso (isotropic diffusivity in m^2/s e.g. 0.003)"
echo "-Q (name of the GPU(s) queue, default cuda.q (defined in environment variable: FSLGECUDAQ)"
echo "-NJOBS (number of jobs to queue, the data is divided in NJOBS parts, usefull for a GPU cluster, default 4)"
echo "--runMCMC (if you want to run MCMC)"
echo "--rician (if you want to run rician noise modelling)"
echo "-b (burnin period, default 5000)"
echo "-j (number of jumps, default 1250)"
echo "-s (sample every, default 25)"
echo "--BIC_AIC (calculate BIC & AIC)"
echo ""
exit 1
}
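# Example invocation (hypothetical subject directory and values, for illustration only):
#   Pipeline_NODDI_Watson_diff.sh /data/subject01 --dax 0.0017 --diso 0.003 -NJOBS 4 --runMCMC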
[ "$1" = "" ] && Usage
modelname=NODDI_Watson_diff
step1=GridSearch
step2=FitFractions
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${FSLDIR}/lib
subjdir=`make_absolute $1`
subjdir=`echo $subjdir | sed 's/\/$//'`
echo "---------------------------------------------------------------------------------"
echo "------------------------------------ CUDIMOT ------------------------------------"
echo "----------------------------- MODEL: $modelname -----------------------------"
echo "---------------------------------------------------------------------------------"
echo subjectdir is $subjdir
start=`date +%s`
#parse option arguments
njobs=1
burnin=1000
njumps=1250
sampleevery=25
other=""
queue=""
lastStepModelOpts=" --getPredictedSignal"
dax=0.0017
diso=0.003
shift
while [ ! -z "$1" ]
do
case "$1" in
--dax) dax=$2;shift;;
--diso) diso=$2;shift;;
-Q) queue="-q $2";shift;;
-NJOBS) njobs=$2;shift;;
-b) burnin=$2;shift;;
-j) njumps=$2;shift;;
-s) sampleevery=$2;shift;;
--runMCMC) lastStepModelOpts=$lastStepModelOpts" --runMCMC";;
--BIC_AIC) lastStepModelOpts=$lastStepModelOpts" --BIC_AIC";;
--rician) lastStepModelOpts=$lastStepModelOpts" --rician";;
#*) other=$other" "$1;;
esac
shift
done
#Set options
opts="--bi=$burnin --nj=$njumps --se=$sampleevery"
opts="$opts $other"
if [ "x$SGE_ROOT" != "x" ]; then
queue="-q $FSLGECUDAQ"
fi
#check that all required files exist
if [ ! -d $subjdir ]; then
echo "subject directory $1 not found"
exit 1
fi
if [ `${FSLDIR}/bin/imtest ${subjdir}/data` -eq 0 ]; then
echo "${subjdir}/data not found"
exit 1
fi
if [ `${FSLDIR}/bin/imtest ${subjdir}/nodif_brain_mask` -eq 0 ]; then
echo "${subjdir}/nodif_brain_mask not found"
exit 1
fi
if [ -e ${subjdir}.${modelname}/xfms/eye.mat ]; then
echo "${subjdir} has already been processed: ${subjdir}.${modelname}."
echo "Delete or rename ${subjdir}.${modelname} before repeating the process."
exit 1
fi
echo Making output directory structure
mkdir -p ${subjdir}.${modelname}/
mkdir -p ${subjdir}.${modelname}/diff_parts
mkdir -p ${subjdir}.${modelname}/logs
mkdir -p ${subjdir}.${modelname}/Dtifit
mkdir -p ${subjdir}.${modelname}/${step1}
mkdir -p ${subjdir}.${modelname}/${step1}/diff_parts
mkdir -p ${subjdir}.${modelname}/${step1}/logs
mkdir -p ${subjdir}.${modelname}/${step2}
mkdir -p ${subjdir}.${modelname}/${step2}/diff_parts
mkdir -p ${subjdir}.${modelname}/${step2}/logs
part=0
echo Copying files to output directory
${FSLDIR}/bin/imcp ${subjdir}/nodif_brain_mask ${subjdir}.${modelname}
if [ `${FSLDIR}/bin/imtest ${subjdir}/nodif` = 1 ] ; then
${FSLDIR}/bin/fslmaths ${subjdir}/nodif -mas ${subjdir}/nodif_brain_mask ${subjdir}.${modelname}/nodif_brain
fi
# Specify Common Fixed Parameters
CFP_file=$subjdir.${modelname}/CFP
cp ${subjdir}/bvecs $subjdir.${modelname}
cp ${subjdir}/bvals $subjdir.${modelname}
echo $subjdir.${modelname}/bvecs > $CFP_file
echo $subjdir.${modelname}/bvals >> $CFP_file
N=`wc -w ${subjdir}/bvals | awk '{print $1;}'`
for dparam in dax diso; do
for file in $subjdir.${modelname}/$dparam $subjdir.${modelname}/ddtmp; do
if [ -f $file ]; then rm $file; fi
done
if [ "$dparam" == "dax" ]; then d=$dax; fi
if [ "$dparam" == "diso" ]; then d=$diso; fi
for ((i=1;i<=$N;i++)); do echo $d >> $subjdir.${modelname}/ddtmp; done
echo `cat $subjdir.${modelname}/ddtmp` > $subjdir.${modelname}/$dparam
echo $subjdir.${modelname}/$dparam >> $CFP_file
done
rm $subjdir.${modelname}/ddtmp
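# At this point the CFP file lists the common fixed parameters in order:
# bvecs, bvals, then dax and diso, each of the last two written as a single
# row of N copies of the requested diffusivity (N = number of entries in bvals).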
#Set more options
opts=$opts" --data=${subjdir}/data --maskfile=$subjdir.${modelname}/nodif_brain_mask --forcedir --CFP=$CFP_file"
# Calculate S0 with the mean of the volumes with bval<=50
bvals=`cat ${subjdir}/bvals`
mkdir -p ${subjdir}.${modelname}/temporal
pos=0
for i in $bvals; do
if [ $i -le 50 ]; then
fslroi ${subjdir}/data ${subjdir}.${modelname}/temporal/volume_$pos $pos 1
fi
pos=$(($pos + 1))
done
fslmerge -t ${subjdir}.${modelname}/temporal/S0s ${subjdir}.${modelname}/temporal/volume*
fslmaths ${subjdir}.${modelname}/temporal/S0s -Tmean ${subjdir}.${modelname}/S0
rm -rf ${subjdir}.${modelname}/temporal
# Specify Fixed parameters: S0
FixPFile=${subjdir}.${modelname}/FixP
echo ${subjdir}.${modelname}/S0 >> $FixPFile
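# FixP lists the (voxel-wise) fixed parameters; here only S0, computed just
# above as the mean of the volumes whose bval is <= 50.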
##############################################################################
################################ First Dtifit ###############################
##############################################################################
echo "Queue Dtifit"
PathDTI=${subjdir}.${modelname}/Dtifit
dtifit_command="${bindir}/Run_dtifit.sh ${subjdir} ${subjdir}.${modelname} ${bindir}"
#SGE
dtifitProcess=`${FSLDIR}/bin/fsl_sub $queue -l $PathDTI/logs -N dtifit $dtifit_command`
##### Model Parameters: fiso, fintra, kappa, th, ph ######
#############################################################
##################### Grid Search Step ######################
#############################################################
echo "Queue GridSearch process"
PathStep1=$subjdir.${modelname}/${step1}
# Create file to specify initialisation parameters (2 parameters: th,ph)
InitializationFile=$PathStep1/InitializationParameters
echo "" > $InitializationFile #fiso
echo "" >> $InitializationFile #fintra
echo "" >> $InitializationFile #kappa
echo ${PathDTI}/dtifit_V1_th.nii.gz >> $InitializationFile #th
echo ${PathDTI}/dtifit_V1_ph.nii.gz >> $InitializationFile #ph
# Do GridSearch (fiso,fintra,kappa)
GridFile=$PathStep1/GridSearch
echo "search[0]=(0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0)" > $GridFile #fiso
echo "search[1]=(0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0)" >> $GridFile #fintra
echo "search[2]=(1,2,3,4,5,6,7,8)" >> $GridFile #kappa
partsdir=$PathStep1/diff_parts
outputdir=$PathStep1
Step1Opts=$opts" --outputdir=$outputdir --partsdir=$partsdir --FixP=$FixPFile --gridSearch=$GridFile --no_LevMar --init_params=$InitializationFile --fixed=3,4"
postproc=`${bindir}/jobs_wrapper.sh $PathStep1 $dtifitProcess $modelname GS $njobs $Step1Opts`
###############################################################
##################### Fit only Fractions ######################
###############################################################
echo "Queue Fitting Fractions process"
PathStep2=$subjdir.${modelname}/${step2}
# Create file to specify initialisation parameters (2 parameters: fiso,fintra)
InitializationFile=$PathStep2/InitializationParameters
echo $PathStep1/Param_0_samples > $InitializationFile #fiso
echo $PathStep1/Param_1_samples >> $InitializationFile #fintra
echo $PathStep1/Param_2_samples >> $InitializationFile #kappa
echo ${PathDTI}/dtifit_V1_th.nii.gz >> $InitializationFile #th
echo ${PathDTI}/dtifit_V1_ph.nii.gz >> $InitializationFile #ph
partsdir=$PathStep2/diff_parts
outputdir=$PathStep2
Step2Opts=$opts" --outputdir=$outputdir --partsdir=$partsdir --FixP=$FixPFile --init_params=$InitializationFile --fixed=2,3,4"
postproc=`${bindir}/jobs_wrapper.sh $PathStep2 $postproc $modelname FitFractions $njobs $Step2Opts`
######################################################################################
######################### Fit all the parameters of the Model ########################
######################################################################################
echo "Queue Fitting process"
# Create file to specify initialization parameters (5 parameters: fiso,fintra,kappa,th,ph)
InitializationFile=$subjdir.${modelname}/InitializationParameters
echo $PathStep2/Param_0_samples > $InitializationFile #fiso
echo $PathStep2/Param_1_samples >> $InitializationFile #fintra
echo ${PathStep1}/Param_2_samples >> $InitializationFile #kappa
echo ${PathDTI}/dtifit_V1_th.nii.gz >> $InitializationFile #th
echo ${PathDTI}/dtifit_V1_ph.nii.gz >> $InitializationFile #ph
partsdir=${subjdir}.${modelname}/diff_parts
outputdir=${subjdir}.${modelname}
ModelOpts=$opts" --outputdir=$outputdir --partsdir=$partsdir --FixP=$FixPFile --init_params=$InitializationFile $lastStepModelOpts"
postproc=`${bindir}/jobs_wrapper.sh ${subjdir}.${modelname} $postproc $modelname FitProcess $njobs $ModelOpts`
##########################################
### Calculate Dispersion Index & dyads ###
##########################################
finish_command="/home/fs0/amyh/cudimot/mymodels/NODDI_Watson_diff/${modelname}_finish.sh ${subjdir}.${modelname}"
#SGE
finishProcess=`${FSLDIR}/bin/fsl_sub $queue -l ${subjdir}.${modelname}/logs -N ${modelname}_finish -j $postproc $finish_command`
endt=`date +%s`
runtime=$((endt-start))
#echo Runtime $runtime
echo Everything Queued
#define MACRO template <typename T> __device__ inline
#define MPI 3.14159265358979323846
#define M_SQRT_PI 1.772453850905516
#define NMAX 6
#define H 0.4
#define A1 (2.0/3.0)
#define A2 0.4
#define A3 (2.0/7.0)
MACRO T dawson(T x){ // From Numerical Recipes in C
//Returns Dawson’s integral for any real x
int i,n0;
T d1,d2,e1,e2,sum,x2,xp,xx,ans;
T c[NMAX+1];
int init=0; // in Numerical Recipes this flag (and the c[] table) is static; here the table is recomputed on every call
if (init==0){
init=1;
for(i=0;i<NMAX;i++){
c[i]=((T)2.0*(i+1)-(T)1.0)*(T)H;
c[i]=exp_gpu(-c[i]*c[i]);
}
}
if(fabs_gpu(x) < (T)0.2){ // Use series expansion.
x2=x*x;
ans=x*((T)1.0-(T)A1*x2*((T)1.0-(T)A2*x2*((T)1.0-(T)A3*x2)));
}else{ // Use sampling theorem representation.
xx=fabs_gpu(x);
n0=2*(int)((T)0.5*xx/(T)H+(T)0.5);
xp=xx-n0*(T)H;
e1=exp_gpu((T)2.0*xp*(T)H);
e2=e1*e1;
d1=n0+1;
d2=d1-(T)2.0;
sum=0.0;
for (i=0;i<NMAX;i++,d1+=(T)2.0,d2-=(T)2.0,e1*=e2){
sum += c[i]*(e1/d1+(T)1.0/(d2*e1));
}
//ans=(T)0.5641895835*SIGN(exp_gpu(-xp*xp),x)*sum;
ans=exp_gpu(-xp*xp);
if(ans<0) ans=-ans; // magnitude
if(x<0) ans=-ans; // * sign(x)
ans=(T)0.5641895835*ans*sum; //Constant is 1/sqrt(pi)
}
return ans;
}
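// Dawson's integral: D(x) = exp(-x^2) * \int_0^x exp(t^2) dt
//                         = (sqrt(pi)/2) * exp(-x^2) * erfi(x),
// so the commented-out erfi expression in WatsonHinderedDiffusionCoeff below is
// equivalent to dawson(sqrt(kappa)). For |x| < 0.2 the series
//   D(x) = x*(1 - (2/3)x^2 + (4/15)x^4 - (8/105)x^6 + ...)
// is used (A1, A2, A3 are its nested coefficients); otherwise the Numerical
// Recipes sampling-theorem approximation with step H = 0.4 is applied, and
// 0.5641895835 is 1/sqrt(pi).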
MACRO void WatsonHinderedDiffusionCoeff(//input
T kappa,
// dPar is a constant
T dParallel,
T dPerp,
//output
T& dPar_equivalent,
T& dPerp_equivalent)
{
T dParMdPerp = dParallel - dPerp;
if (kappa < (T)1e-5){
T dParP2dPerp = dParallel + (T)2.0*dPerp;
T k2 = kappa*kappa;
dPar_equivalent = dParP2dPerp/(T)3.0
+ (T)4.0*dParMdPerp*kappa/(T)45.0
+ (T)8.0*dParMdPerp*k2/(T)945.0;
dPerp_equivalent = dParP2dPerp/(T)3.0
- (T)2.0*dParMdPerp*kappa/(T)45.0
- (T)4.0*dParMdPerp*k2/(T)945.0;
}else{
T sk;
T dawsonf;
//if(kapppa<0){
// never. kappa is always >=0
//}else{
sk = sqrt_gpu(kappa);
//dawsonf= (T)0.5*exp_gpu(-kappa)*sqrt_gpu(M_PI)*erfi_sk;
dawsonf=dawson(sk);
T factor = sk/dawsonf;
dPar_equivalent = (-dParMdPerp+(T)2.0*dPerp*kappa+dParMdPerp*factor) / ((T)2.0*kappa);
dPerp_equivalent = (dParMdPerp+(T)2.0*(dParallel+dPerp)*kappa-dParMdPerp*factor) / ((T)4.0*kappa);
}
}
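// For kappa >= 1e-5 the function above implements the Watson-averaged
// ("equivalent") diffusivities in terms of F = dawson(sqrt(kappa)):
//   dPar_eq  = ( -(dPar - dPerp) + 2*dPerp*kappa + (dPar - dPerp)*sqrt(kappa)/F ) / (2*kappa)
//   dPerp_eq = (  (dPar - dPerp) + 2*(dPar + dPerp)*kappa - (dPar - dPerp)*sqrt(kappa)/F ) / (4*kappa)
// The kappa < 1e-5 branch is the Taylor expansion of these expressions about
// kappa = 0 (both tend to (dPar + 2*dPerp)/3), avoiding the 0/0 at kappa = 0.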
MACRO void WatsonSHCoeff(T k, T* coeff)
{
// Computes the spherical harmonic (SH) coefficients of the Watson distribution
// with concentration parameter k (kappa), up to 12th order
coeff[0]= (T)2.0*sqrt_gpu((T)MPI);
if(k>30){
//very large kappa
T lnkd = log_gpu(k) - log_gpu((T)30.0);
T lnkd2 = lnkd*lnkd;