/* MELODIC - Multivariate exploratory linear optimized decomposition into
independent components
meldata.cc - data handler / container class
Christian F. Beckmann, FMRIB Analysis Group
Copyright (C) 1999-2013 University of Oxford */
using namespace Utilities;
using namespace NEWIMAGE;
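// process_file: read one 4D input file, mask it into a (time x voxel) matrix
// and apply the per-file preprocessing: optional conversion to percent BOLD
// signal change or to power spectra, optional transpose for temporal ICA,
// voxel-wise variance normalisation and, if requested, the INSTACORR steps.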
ReturnMatrix MelodicData::process_file(string fname, int numfiles)
{
dbgmsg(string("START: process_file") << endl);
Matrix tmpData;
{
volume4D<float> RawData;
//read data
message("Reading data file " << fname << " ... ");
read_volume4D(RawData,fname);
message(" done" << endl);
del_vols(RawData,opts.dummy.value());
Mean += meanvol(RawData)/numfiles;
//estimate smoothness
if((Resels == 0)&&(!opts.filtermode))
Resels = est_resels(RawData,Mask);
//convert the masked 4D data to a (time x voxel) matrix
tmpData = RawData.matrix(Mask);
}
//convert to percent BOLD signal change
if(opts.pbsc.value()){
message(" Converting data to percent BOLD signal change ...");
Matrix meanimg = convert_to_pbsc(tmpData);
meanR = meanimg.Row(1);
message(" done" << endl);
}
else{
meanR = mean(tmpData);
if(opts.remove_meanvol.value()){
message(string(" Removing mean image ..."));
tmpData = remmean(tmpData);
message(" done" << endl);
}
else
meanR = ones(1,tmpData.Ncols());
}
//remove the mean time course
if(opts.remove_meantc.value()){
meanC = mean(tmpData,2);
tmpData = remmean(tmpData,2);
}
//convert to power spectra
if(opts.pspec.value()){
message(" Converting data to powerspectra ...");
tmpData = calc_FFT(tmpData);
message(" done" << endl);
}
//switch dimension in case temporal ICA is required
if(opts.temporal.value()){
message(string(" Switching dimensions for temporal ICA") << endl);
tmpData = tmpData.t();
Matrix tmp;
tmp = meanC;
meanC = meanR.t();
meanR = tmp.t();
message(" Data size : " << Data.Nrows() << " x " << Data.Ncols() <<endl);
}
message(" Normalising by voxel-wise variance ...");
outMsize("stdDev",stdDev);
// if(stdDev.Storage()==0)
stdDev = varnorm(tmpData,std::min(30,tmpData.Nrows()-1),
opts.vn_level.value(), opts.econ.value());
// else
// stdDev += varnorm(tmpData,std::min(30,tmpData.Nrows()-1),
// opts.vn_level.value(), opts.econ.value())/numfiles;
//INSTACORRS
if(opts.insta_fn.value().length()>0){
Matrix tmpTC;
tmpTC = tmpData * insta_maps.t();
if(opts.insta_idx.value()<1 || opts.insta_idx.value()>tmpTC.Ncols()){
cerr << "ERROR:: INSTACORR index is wrong \n\n";
exit(2);
}
Matrix tmpRef = tmpTC.Column(opts.insta_idx.value());
if(opts.insta_idx.value()>1){
// swap columns
dbgmsg(string("INSTACORR: swap columns") << endl);
tmpTC.Column(opts.insta_idx.value()) << tmpTC.Column(1);
tmpTC.Column(1) << tmpRef;
}
if(opts.insta_partial.value() && tmpTC.Ncols()>1){
// partial correlations
dbgmsg(string("INSTACORR: partial analysis") << endl);
Matrix tmpConf = tmpTC.Columns(2,tmpTC.Ncols());
tmpData -= tmpConf * (pinv(tmpConf) * tmpData);
tmpRef -= tmpConf * (pinv(tmpConf) * tmpRef);
}
if(opts.insta_varnorm.value()){
Matrix vscales = pow(stdev(tmpData,1),-1);
varnorm(tmpData,vscales);
varnorm(tmpRef,pow(stdev(tmpRef,1),-1));
}
// Schur product
}
dbgmsg(string("END: process_file") << endl);
return tmpData;
}
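// expand_mix: map the mixing matrix from the reduced (whitened) space back
// into the original data space via the stored de-whitening matrices.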
Matrix MelodicData::expand_mix()
{
Matrix out;
out = expand_dimred(mixMatrix);
return out;
}
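// expand_dimred: apply the per-file de-whitening matrices in DWM block-wise;
// the rows of Mat are partitioned to match each block and the products are
// stacked vertically.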
Matrix MelodicData::expand_dimred(const Matrix& Mat)
{
int first, last;
first = 1;
last = DWM.at(0).Ncols();
Matrix tmp = DWM.at(0) * Mat.Rows(first,last);
for(unsigned int ctr = 1; ctr < DWM.size(); ctr++){
first = last + 1;
last += DWM.at(ctr).Ncols();
tmp &= DWM.at(ctr) * Mat.Rows(first, last);
return tmp;
}
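// reduce_dimred: the converse of expand_dimred, using the whitening matrices
// stored in WM.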
Matrix MelodicData::reduce_dimred(const Matrix& Mat)
{
int first, last;
first = 1;
last = WM.at(0).Ncols();
Matrix tmp = WM.at(0) * Mat.Rows(first,last);
for(unsigned int ctr = 1; ctr < WM.size(); ctr++){
first = last + 1;
last += WM.at(ctr).Ncols();
tmp &= WM.at(ctr) * Mat.Rows(first, last);
// ... (original source lines 203-233 not shown) ...
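// The fragment below estimates the time-course (T) and subject/session (S)
// modes: for the "tica" approach the expanded mixing matrix is factorised with
// krfact(), otherwise each column is used directly as a time course; a GLM fit
// against any supplied T/S design and contrast matrices follows.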
Matrix tmp, tmpT, tmpS, tmpT2, tmpS2, tmpT3;
tmp = expand_dimred(mixMatrix);
tmpT = zeros(tmp.Nrows()/numfiles, tmp.Ncols());
tmpS = ones(numfiles, tmp.Ncols());
outMsize("tmp",tmp);
outMsize("tmpT",tmpT);
outMsize("tmpS",tmpS);
dbgmsg(string(" approach ") << opts.approach.value() << endl);
if(opts.approach.value()==string("tica")){
message("Calculating T- and S-modes " << endl);
explained_var = krfact(tmp,tmpT,tmpS);
Tmodes.clear(); Smodes.clear();
for(int ctr = 1; ctr <= tmp.Ncols(); ctr++){
tmpT3 << reshape(tmp.Column(ctr),tmpT.Nrows(),numfiles);
outMsize("tmpT3", tmpT3);
tmpT2 << tmpT.Column(ctr);
tmpS2 << tmpS.Column(ctr);
tmpT3 << SP(tmpT3,pow(ones(tmpT3.Nrows(),1)*tmpS2.t(),-1));
if(numfiles>1)
tmpT2 |= tmpT3;
if(mean(tmpS2,1).AsScalar()<0){
tmpT2*=-1.0;
tmpS2*=-1.0;
}
add_Tmodes(tmpT2);
add_Smodes(tmpS2);
}
}
else{
Tmodes.clear();
Smodes.clear();
for(int ctr = 1; ctr <= tmp.Ncols(); ctr++){
tmpT3 << tmp.Column(ctr);
add_Tmodes(tmpT3);
}
// ... (original source lines 243-270 not shown) ...
if(opts.approach.value()!=string("concat")){
//add GLM OLS fit
dbgmsg(string(" GLM fitting ") << endl);
if(Tdes.Storage()){
Matrix alltcs = Tmodes.at(0).Column(1);
for(int ctr=1; ctr < (int)Tmodes.size();ctr++)
alltcs|=Tmodes.at(ctr).Column(1);
if((alltcs.Nrows()==Tdes.Nrows())&&(Tdes.Nrows()>Tdes.Ncols()))
glmT.olsfit(alltcs,Tdes,Tcon);
}
if(Sdes.Storage()){
Matrix alltcs = Smodes.at(0);
for(int ctr=1; ctr < (int)Smodes.size();ctr++)
alltcs|=Smodes.at(ctr);
if((alltcs.Nrows()==Sdes.Nrows())&&(Sdes.Nrows()>Sdes.Ncols()&&alltcs.Nrows()>2))
glmS.olsfit(alltcs,Sdes,Scon);
}
}
// else{
// dbgmsg(string(" Bypassing krfac ") << endl);
// add_Tmodes(tmp);
// add_Smodes(tmpS);
// }
}
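// dual_regression: stage 1 regresses the group IC maps against each subject's
// preprocessed data to obtain subject-specific time courses; stage 2 regresses
// the (variance-normalised) time courses back onto the data to obtain
// subject-specific spatial maps, optionally written to the dr/ subdirectory.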
void MelodicData::dual_regression()
{
dbgmsg(string("START: dual_regression") << endl);
Tmodes.clear();
Smodes.clear();
bool tmpvarnorm = opts.varnorm.value();
// Switch off variance normalisation
opts.varnorm.set_T(false);
Log drO;
if(opts.dr_out.value())
drO.makeDir(logger.appendDir("dr"),"dr.log");
Matrix tmpcont = diag(ones(IC.Nrows(),1)), s1,s2, tmpData, alltcs;
basicGLM tmpglm;
for(int ctr = 0; ctr < numfiles; ctr++){
tmpData = process_file(opts.inputfname.value().at(ctr), numfiles);
//may want to remove the spatial means first
tmpglm.olsfit(remmean(tmpData.t(),1),remmean(IC.t(),1),tmpcont);
outMsize("s1",s1);
outMsize("alltcs",alltcs);
if(alltcs.Storage()==0)
// output DR
if(opts.dr_out.value()){
dbgmsg(string("START: dual_regression output") << endl);
write_ascii_matrix(drO.appendDir("dr_stage1_subject"+num2str(ctr,4)+".txt"),s1);
//des_norm
s1 = SP(s1,ones(s1.Nrows(),1)*pow(stdev(s1,1),-1));
tmpglm.olsfit(remmean(tmpData),remmean(s1,1),tmpcont);
s2=tmpglm.get_beta();
save4D(s2,string("dr/dr_stage2_subject"+num2str(ctr,4)));
save4D(s2,string("dr/dr_stage2_subject"+num2str(ctr,4)+"_Z"));
}
}
for(int ctr = 1; ctr <= alltcs.Ncols(); ctr++){
tmpcont << alltcs.Column(ctr);
add_Tmodes(tmpcont);
}
for(int ctrC = 1; ctrC <=IC.Nrows(); ctrC++){
Matrix tmpall = zeros(numfiles,IC.Ncols());
string fnout = string("dr/dr_stage2_ic"+num2str(ctrC-1,4));
for(int ctrS = 0; ctrS < numfiles; ctrS++){
string fnin = logger.appendDir(string("dr/dr_stage2_subject"+num2str(ctrS,4)));
dbgmsg(fnout << endl << fnin << endl);
volume4D<float> vol;
read_volume4DROI(vol,fnin,0,0,0,ctrC-1,-1,-1,-1,ctrC-1);
Matrix tmp2 = vol.matrix(Mask);
tmpall.Row(ctrS+1) << tmp2;
}
save4D(tmpall,fnout);
}
opts.varnorm.set_T(tmpvarnorm);
dbgmsg(string("END: dual_regression") << endl);
}
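// set_TSmode: dispatch to dual_regression() or to the deprecated T/S-mode
// estimation, depending on opts.dr.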
dbgmsg(string("START: set_TSmode")<< endl);
if(opts.dr.value())
dual_regression();
else
set_TSmode_depr();
dbgmsg(string("END: set_TSmode")<< endl);
dbgmsg(string("START: setup_classic") << endl);
if(numfiles > 1 && opts.joined_vn.value()){
alldat = process_file(opts.inputfname.value().at(0), numfiles) / numfiles;
if(opts.pca_dim.value() > alldat.Nrows()-2){
cerr << "ERROR:: too many components selected \n\n";
exit(2);
}
for(int ctr = 1; ctr < numfiles; ctr++){
tmpData = process_file(opts.inputfname.value().at(ctr), numfiles) / numfiles;
if(tmpData.Ncols() == alldat.Ncols() && tmpData.Nrows() == alldat.Nrows())
if(opts.approach.value()==string("tica")){
cerr << "ERROR:: data dimensions do not match, TICA not possible \n\n";
exit(2);
}
if(tmpData.Ncols() == alldat.Ncols()){
int mindim = min(alldat.Nrows(),tmpData.Nrows());
alldat = alldat.Rows(1,mindim);
tmpData = tmpData.Rows(1,mindim);
alldat += tmpData;
}
else
message("Data dimensions do not match - ignoring "+opts.inputfname.value().at(ctr) << endl);
//update mask
if(opts.update_mask.value()){
message("Excluding voxels with constant value ...");
update_mask(Mask, alldat);
message(" done" << endl);
}
if((numfiles > 1 ) && opts.joined_vn.value() && tmpvarnorm){
message(endl<<"Normalising jointly by voxel-wise variance ...");
stdDev = varnorm(alldat,alldat.Nrows(),opts.vn_level.value(),opts.econ.value());
stdDevi = pow(stdDev,-1);
message(" done" << endl);
message(endl << "Initial data size : "<<alldat.Nrows()<<" x "<<alldat.Ncols()<<endl<<endl);
//estimate model order
Matrix tmpPPCA;
RowVector AdjEV, PercEV;
Matrix tmpE;
SymmetricMatrix Corr;
order = ppca_dim(remmean(alldat,2), RXweight, tmpPPCA, AdjEV, PercEV, Corr, pcaE, pcaD, Resels, opts.pca_est.value());
if (opts.paradigmfname.value().length()>0)
order += param.Ncols();
if(opts.pca_dim.value() == 0)
opts.pca_dim.set_T(order);
if(opts.pca_dim.value() < 0)
opts.pca_dim.set_T(min(order,-1*opts.pca_dim.value()));
PPCA=tmpPPCA;
if (opts.paradigmfname.value().length()>0){
Matrix tmpPscales;
tmpPscales = param.t() * alldat;
paramS = stdev(tmpPscales.t());
calc_white(pcaE, pcaD, order, param, paramS, whiteMatrix, dewhiteMatrix);
}else
calc_white(pcaE, pcaD, order, whiteMatrix, dewhiteMatrix);
if(opts.debug.value()){
outMsize("pcaE",pcaE); saveascii(pcaE,"pcaE");
outMsize("pcaD",pcaD); saveascii(pcaD,"pcaD");
outMsize("AdjEV",AdjEV); saveascii(AdjEV,"AdjEV");
outMsize("PercEV",PercEV); saveascii(PercEV,"PercEV");
outMsize("tmpPPCA",tmpPPCA); saveascii(tmpPPCA,"tmpPPCA");
outMsize("whiteMatrix",whiteMatrix); saveascii(whiteMatrix,"whiteMatrix");
outMsize("dewhiteMatrix",dewhiteMatrix); saveascii(dewhiteMatrix,"dewhiteMatrix");
}
if(numfiles == 1){
Data = alldat;
Matrix tmp = IdentityMatrix(Data.Nrows());
DWM.push_back(tmp);
WM.push_back(tmp);
}
else{
Matrix newWM, newDWM;
for(int ctr = 0; ctr < numfiles; ctr++){
tmpData = process_file(opts.inputfname.value().at(ctr), numfiles);
dbgmsg("tmpData normalisation"<< endl);
dbgmsg("stdDev " << stdDev(1,2)<< endl);
dbgmsg("tmpData " << tmpData.SubMatrix(1,1,1,2)<< endl);
SP3(tmpData,pow(stdDev,-1));
message(" Individual whitening in a " << order << " dimensional subspace " << endl);
std_pca(tmpData, RXweight, Corr, pcaE, pcaD, opts.econ.value());
calc_white(pcaE, pcaD, order, newWM, newDWM);
if(!opts.dr_pca.value()){
std_pca(whiteMatrix*tmpData, RXweight, Corr, pcaE, pcaD, opts.econ.value());
calc_white(pcaE, pcaD, order, newWM, newDWM);
newDWM=(dewhiteMatrix*newDWM);
newWM=(newWM*whiteMatrix);
}
else{
Matrix tmp1, tmp2;
std_pca(tmp1 * tmpData, RXweight, Corr, pcaE, pcaD, opts.econ.value());
calc_white(pcaE, pcaD, order, newWM, newDWM);
newDWM=(tmp2*newDWM);
newWM=(newWM * tmp1);
}
DWM.push_back(newDWM);
WM.push_back(newWM);
tmpData = newWM * tmpData;
//concatenate Data
if(Data.Storage() == 0)
Data = tmpData;
else
Data &= tmpData;
dbgmsg(string("END: setup_classic") << endl);
dbgmsg(string("START: setup_migp") << endl);
std::vector<int> myctr;
for (int i=0; i< numfiles ; ++i) myctr.push_back(i);
if(opts.migp_shuffle.value()){
message("Randomising input file order" << endl);
std::random_shuffle ( myctr.begin(), myctr.end() );
}
Matrix tmpData;
bool tmpvarnorm = opts.varnorm.value();
if(numfiles > 1 && opts.joined_vn.value()){
opts.varnorm.set_T(false);
}
for(int ctr = 0; ctr < numfiles; ctr++){
tmpData = process_file(opts.inputfname.value().at(myctr.at(ctr)), numfiles) / numfiles;
if (opts.migpN.value()==0){
opts.migpN.set_T(2*tmpData.Nrows()-1);
}
save4D(tmpData,string("preproc_dat") + num2str(ctr+1));
if(Data.Storage()==0)
Data = tmpData;
else
Data &= tmpData;
if(Data.Nrows() > opts.migp_factor.value()*opts.migpN.value() || ctr==numfiles-1){
message(" Reducing data matrix to a " << opt.migpN.value() << " dimensional subspace " << endl);
Matrix pcaE;
SymmetricMatrix Corr;
std_pca(Data, RXweight, Corr, pcaE, pcaD, opts.econ.value());
pcaE = pcaE.Columns(pcaE.Ncols()-opts.migpN.value()+1,pcaE.Ncols());
Data = pcaE.t() * Data;
}
}
//update mask
if(opts.update_mask.value()){
message(endl<< "Excluding voxels with constant value ...");
update_mask(Mask, Data);
message(" done" << endl);
}
Matrix tmp = IdentityMatrix(Data.Nrows());
DWM.push_back(tmp);
WM.push_back(tmp);
opts.varnorm.set_T(tmpvarnorm);
if(opts.varnorm2.value()){
message(" Normalising by voxel-wise variance ...");
stdDev = varnorm(Data,std::min(30,Data.Nrows()-1),
opts.vn_level.value(), opts.econ.value());
message(" done" << endl);
}
dbgmsg(string("END: setup_migp") << endl);
dbgmsg(string("START: setup") << endl);
// ... (original source lines 589-614 not shown) ...
numfiles = (int)opts.inputfname.value().size();
setup_misc();
if(opts.debug.value())
memmsg(" after setup_misc ");
if(opts.filtermode){ // basic setup for filtering only
Data = process_file(opts.inputfname.value().at(0));
}
else{
if((numfiles > 1) && (opts.approach.value()==string("defl") || opts.approach.value()==string("symm")))
opts.approach.set_T("concat");
if(opts.migp.value())
setup_migp();
else
setup_classic();
}
message(endl << " Data size : "<<Data.Nrows()<<" x "<<Data.Ncols()<<endl<<endl);
outMsize("stdDev",stdDev);
//meanC=mean(Data,2);
if(opts.debug.value())
save4D(Data,"concat_data");
//save the mean & mask
save_volume(Mask,logger.appendDir("mask"));
save_volume(Mean,logger.appendDir("mean"));
}
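// setup_misc: create the analysis mask, load (or default) the background
// image, check image dimensions, seed the random number generator and read the
// optional paradigm, design/contrast matrices and INSTACORR maps.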
void MelodicData::setup_misc()
{
dbgmsg(string("START: setup_misc") << endl);
//create mask
create_mask(Mask);
//setup background image
if(opts.bgimage.value()>""){
read_volume(background,opts.bgimage.value());
if(!samesize(Mean,background)){
cerr << "ERROR:: background image and data have different dimensions \n\n";
exit(2);
}
}else{
background = Mean;
}
if(!samesize(Mean,Mask)){
cerr << "ERROR:: mask and data have different dimensions \n\n";
exit(2);
}
//reset mean
Mean *= 0;
//set up weighting
if(opts.segment.value().length()>0){
create_RXweight();
}
//seed the random number generator
double tmptime = time(NULL);
if ( opts.seed.value() != -1 ) {
tmptime = opts.seed.value();
}
srand((unsigned int) tmptime);
if(opts.paradigmfname.value().length()>0){
message(" Use columns in " << opts.paradigmfname.value()
<< " for PCA initialisation" <<endl);
param = read_ascii_matrix(opts.paradigmfname.value());
Matrix tmpPU, tmpPV;
DiagonalMatrix tmpPD;
SVD(param, tmpPD, tmpPU, tmpPV);
param = tmpPU;
opts.pca_dim.set_T(std::max(opts.pca_dim.value(), param.Ncols()+3));
if(opts.debug.value()){
outMsize("Paradigm",param); saveascii(param,"param");
//opts.guessfname.set_T(opts.paradigmfname.value());
}
}
//read in post-proc design matrices etc
if(opts.fn_Tdesign.value().length()>0)
Tdes = read_ascii_matrix(opts.fn_Tdesign.value());
if(opts.fn_Sdesign.value().length()>0)
Sdes = read_ascii_matrix(opts.fn_Sdesign.value());
if(opts.fn_Tcon.value().length()>0)
Tcon = read_ascii_matrix(opts.fn_Tcon.value());
if(opts.fn_Scon.value().length()>0)
Scon = read_ascii_matrix(opts.fn_Scon.value());
if(opts.fn_TconF.value().length()>0)
TconF = read_ascii_matrix(opts.fn_TconF.value());
if(opts.fn_SconF.value().length()>0)
SconF = read_ascii_matrix(opts.fn_SconF.value());
if(numfiles>1 && Sdes.Storage() == 0){
Sdes = ones(numfiles,1);
if(Scon.Storage() == 0){
Scon = ones(1,1);
Scon &= -1*Scon;
}
}
//INSTACORRS
if(opts.insta_fn.value().length()>0){
message(" Reading in " << opts.insta_fn.value()
<< " for instantaneous correlation analysis" <<endl);
volume4D<float> tmp_im;
read_volume4D(tmp_im,opts.insta_fn.value());
if(!samesize(Mean,tmp_im[0])){
cerr << "ERROR:: instacorr mask and data have different voxel dimensions \n\n";
exit(2);
}
insta_maps = tmp_im.matrix(Mask);
}
dbgmsg(string("END: setup_misc") << endl);
void MelodicData::save()
{
//check for temporal ICA
if(opts.temporal.value()){
message(string("temporal ICA: transform back the data ... "));
Matrix tmpIC = mixMatrix.t();
mixMatrix=IC.t();
IC=tmpIC;
unmixMatrix=pinv(mixMatrix);
Data = Data.t();
tmpIC = meanC;
meanC = meanR.t();
meanR = tmpIC.t();
// whiteMatrix = whiteMatrix.t;
// dewhiteMatrix = dewhiteMatrix.t();
message(string("done") << endl);
opts.temporal.set_T(false); // Do not switch again!
}
message(endl << "Writing results to : " << endl);
if((IC.Storage()>0)&&(opts.output_origIC.value())&&(after_mm==false))
save4D(IC,opts.outputfname.value() + "_oIC");
//Output IC -- adjusted for noise
if(IC.Storage()>0){
//Matrix ICadjust;
if(after_mm){
save4D(IC,opts.outputfname.value() + "_IC");
// ICadjust = IC;
}
else{
Matrix resids = stdev(Data - mixMatrix * IC);
for(int ctr=1;ctr<=resids.Ncols();ctr++)
if(resids(1,ctr) < 0.05)
resids(1,ctr)=1;
// stdNoisei = pow(stdev(Data - mixMatrix * IC)*
// std::sqrt((float)(Data.Nrows()-1))/
// std::sqrt((float)(Data.Nrows()-IC.Nrows())),-1);
stdNoisei = pow(resids*
std::sqrt((float)(Data.Nrows()-1))/
std::sqrt((float)(Data.Nrows()-IC.Nrows())),-1);
ColumnVector diagvals;
diagvals=pow(diag(unmixMatrix*unmixMatrix.t()),-0.5);
save4D(SP(IC,diagvals*stdNoisei),opts.outputfname.value() + "_IC");
}
}
saveascii(expand_mix(), opts.outputfname.value() + "_mix");
mixFFT=calc_FFT(expand_mix(), opts.logPower.value());
saveascii(mixFFT,opts.outputfname.value() + "_FTmix");
//Output PPCA
if(PPCA.Storage()>0)
saveascii(PPCA, opts.outputfname.value() + "_PPCA");
//Output ICstats
if(ICstats.Storage()>0)
saveascii(ICstats,opts.outputfname.value() + "_ICstats");
if(opts.output_unmix.value() && unmixMatrix.Storage()>0)
saveascii(unmixMatrix,opts.outputfname.value() + "_unmix");
//Output Mask
message(" "<< logger.appendDir("mask") <<endl);
//Output mean
if(opts.output_mean.value() && meanC.Storage()>0 && meanR.Storage()>0){
saveascii(meanR,opts.outputfname.value() + "_meanR");
saveascii(meanC,opts.outputfname.value() + "_meanC");
}
//Output white
if(opts.output_white.value() && whiteMatrix.Storage()>0&&
dewhiteMatrix.Storage()>0){
saveascii(whiteMatrix,opts.outputfname.value() + "_white");
saveascii(dewhiteMatrix,opts.outputfname.value() + "_dewhite");
Matrix tmp;
tmp=calc_FFT(dewhiteMatrix, opts.logPower.value());
saveascii(tmp,opts.outputfname.value() + "_FTdewhite");
}
//Output PCA
if(opts.output_pca.value() && pcaD.Storage()>0&&pcaE.Storage()>0){
saveascii(pcaE,opts.outputfname.value() + "_pcaE");
saveascii((Matrix) diag(pcaD),opts.outputfname.value() + "_pcaD");
if(whiteMatrix.Ncols()==Data.Ncols())
PCAmaps = dewhiteMatrix.t();
else
PCAmaps = whiteMatrix * Data;
save4D(PCAmaps,opts.outputfname.value() + "_pca");
}
// ... (original source lines 837-867 not shown) ...
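// remove_components: regression-based filtering used in filter mode. The
// user-supplied component numbers are parsed from a string, the matching
// mixing-matrix columns and IC maps are collected, their contribution is
// removed from the data and the filtered 4D data set is saved.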
int MelodicData::remove_components()
{
message("Reading " << opts.filtermix.value() << endl)
mixMatrix = read_ascii_matrix(opts.filtermix.value());
if (mixMatrix.Storage()<=0) {
cerr <<" Please specify the mixing matrix correctly" << endl;
exit(2);
}
unmixMatrix = pinv(mixMatrix);
IC = unmixMatrix * Data;
string tmpstr;
tmpstr = opts.filter.value();
Matrix noiseMix;
Matrix noiseIC;
int ctr=0;
char *p;
char t[1024];
const char *discard = ", [];{(})abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ~!@#$%^&*_-=+|\':><./?";
message("Filtering the data...");
strcpy(t, tmpstr.c_str());
p=strtok(t,discard);
ctr = atoi(p);
if(ctr>0 && ctr<=mixMatrix.Ncols()){
message(" "<< ctr );
noiseMix = mixMatrix.Column(ctr);
noiseIC = IC.Row(ctr).t();
}
else{
cerr << endl<< "component number "<<ctr<<" does not exist" << endl;
}
do{
p=strtok(NULL,discard);
if(p){
ctr = atoi(p);
if(ctr>0 && ctr<=mixMatrix.Ncols()){
message(" "<<ctr);
noiseMix |= mixMatrix.Column(ctr);
noiseIC |= IC.Row(ctr).t();
}
else{
cerr << endl<< "component number "<<ctr<<" does not exist" << endl;
}
}
}while(p);
message(endl);
Matrix newData;
outMsize("DATA",Data);
outMsize("IC",IC);
outMsize("noiseIC",noiseIC);
outMsize("noiseMix",noiseMix);
outMsize("meanR",meanR);
outMsize("meanC",meanC);
if(meanR.Storage()>0)
newData = newData + ones(newData.Nrows(),1)*meanR;
volume4D<float> tmp;
read_volume4D(tmp,opts.inputfname.value().at(0));
tmp.setmatrix(newData,Mask);
save_volume4D(tmp,logger.appendDir(opts.outputfname.value() + "_ICAfiltered"));
return 0;
} // int remove_components()
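// create_RXweight: read the weighting image named by opts.segment and convert
// it into a voxel-wise weight matrix (RXweight) for the covariance estimation.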
void MelodicData::create_RXweight()
{
message("Reading the weights for the covariance R_X from file "<< opts.segment.value() << endl);
volume4D<float> tmpRX;
read_volume4D(tmpRX,opts.segment.value());
RXweight = tmpRX.matrix(Mask);
}
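// Smoothness estimation: if Resels has not been set yet, call FSL's external
// "smoothest" tool on the first input file and parse the reported resel count
// (defaulting to 1.0 if the output cannot be read).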
if(Resels == 0){
string SM_path = opts.binpath + "smoothest";
string Mask_fname = logger.appendDir("mask");
if(opts.segment.value().length()>0){
}
// Setup external call to smoothest:
char callSMOOTHESTstr[1000];
ostrstream osc(callSMOOTHESTstr,1000);
osc << SM_path << " -d " << data_dim()
<< " -r " << opts.inputfname.value().at(0) << " -m "
<< Mask_fname << " > " << logger.appendDir("smoothest") << '\0';
message(" Calling Smoothest: " << callSMOOTHESTstr << endl);
system(callSMOOTHESTstr);
//read back the results
ifstream in;
string str;
Resels = 1.0;
in.open(logger.appendDir("smoothest").c_str(), ios::in);
if(in>0){
for(int ctr=1; ctr<7; ctr++)
in >> str;
in.close();
if(str!="nan")
Resels = atof(str.c_str());
}
}
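// standardise: demean and variance-normalise every in-mask voxel time course
// to N(0,1); voxels with non-positive variance are removed from the mask.
// Returns the number of voxels remaining in the mask.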
unsigned long MelodicData::standardise(volume<float>& mask, volume4D<float>& R)
{
unsigned long count = 0;
int M = R.tsize();
for (int z=mask.minz(); z<=mask.maxz(); z++) {
for (int y=mask.miny(); y<=mask.maxy(); y++) {
for (int x=mask.minx(); x<=mask.maxx(); x++) {
if( mask(x,y,z) > 0.5) {
count ++;
if( M > 2 ) {
// For each voxel
// calculate mean and standard deviation...
double Sx = 0.0, SSx = 0.0;
for ( int t = 0; t < M; t++ ) {
float R_it = R(x,y,z,t);
Sx += R_it;
SSx += (R_it)*(R_it);
}
float mean = Sx / M;
float sdsq = (SSx - ((Sx)*(Sx) / M)) / (M - 1) ;
if (sdsq<=0) {
// trap for differences between mask and invalid data
mask(x,y,z)=0;
count--;
} else {
// ... and use them to standardise to N(0, 1).
for ( unsigned short t = 0; t < M; t++ ) {
R(x,y,z,t) = (R(x,y,z,t) - mean) / sqrt(sdsq);
}
}
}
}
}
}
}
return count;
}
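// est_resels: estimate the spatial smoothness of the data within the mask;
// the voxel time courses are standardised first (see standardise() above).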
float MelodicData::est_resels(volume4D<float> R, volume<float> mask)
{
message(" Estimating data smoothness ... ");
unsigned long mask_volume = standardise(mask, R);