Skip to content
Snippets Groups Projects
Commit 0299f1b2 authored by Matthew Webster's avatar Matthew Webster
Browse files

This commit was manufactured by cvs2svn to create tag 'fsl-5_0_2'.

Sprout from master 2012-11-12 16:36:20 UTC Matthew Webster <mwebster@fmrib.ox.ac.uk> 'changed binno default to sensible limit'
Delete:
    preproc.cc
    randomforest.m
    rf_lda.cc
parent dadfb995
No related branches found
No related merge requests found
/* preproc.cc
Mark Jenkinson, FMRIB Image Analysis Group
Copyright (C) 2003 University of Oxford */
/* CCOPYRIGHT */
// Pre-processing for lesion segmentation
#define _GNU_SOURCE 1
#define POSIX_SOURCE 1
#include <algorithm>
#include <vector>
#include "newimage/newimageall.h"
#include "miscmaths/miscmaths.h"
#include "utils/options.h"
using namespace MISCMATHS;
using namespace NEWIMAGE;
using namespace Utilities;
// The two strings below specify the title and example usage that is
// printed out as the help or usage message
string title="preproc (Version 1.0)\nCopyright(c) 2012, University of Oxford (Mark Jenkinson)";
string examples="preproc [options] -i <imagename> -o <outname>";
// Each (global) Option object below specifies a command-line option and can
// be accessed anywhere in this file (since they are global). The order of
// the constructor arguments is: name(s) of option, default value, help
// message, whether it is compulsory, whether it requires arguments.
// Note that each option must also be registered in main() (via
// options.add) or it will not be active.
Option<bool> verbose(string("-v,--verbose"), false,
string("switch on diagnostic messages"),
false, no_argument);
Option<bool> help(string("-h,--help"), false,
string("display this message"),
false, no_argument);
Option<string> inname(string("-i,--in"), string(""),
string("input filename"),
true, requires_argument);
Option<string> outname(string("-o,--out"), string(""),
string("output filename"),
true, requires_argument);
Option<string> mode(string("-m,--mode"), string("3x3"),
string("specify mode as one of: 3x3, 9x9, 3x3x3, 9x9x9"),
false, requires_argument);
int nonoptarg;  // return value of parse_command_line (index past parsed options)
////////////////////////////////////////////////////////////////////////////
// Local functions
// Moving-average (boxcar) filter of width nker along a single axis.
//   im   : input image
//   nker : kernel width in voxels (assumed odd; half-width n2 = (nker-1)/2)
//   dir  : axis to smooth along (1 = x, 2 = y, 3 = z)
// Returns a smoothed copy of im. Near the edges the average is taken over
// only the in-image samples (the denominator 'den' shrinks), so edges are
// renormalised rather than zero-padded.
volume<float> moving_average(const volume<float>& im, int nker, int dir) {
  // Circular buffer of the last nker samples: on each step one value enters
  // the running sum and (once the window is full) the oldest leaves.
  std::vector<double> buff(nker, 0.0);
  volume<float> retim(im);
  const int n2 = (nker - 1) / 2;  // kernel half-width (output lags input by n2)
  if (dir == 1) {
    for (int z = im.minz(); z <= im.maxz(); z++) {
      for (int y = im.miny(); y <= im.maxy(); y++) {
        double movsum = 0.0;
        int m = 1, den = 0;  // m: 1-based circular-buffer index; den: samples in window
        std::fill(buff.begin(), buff.end(), 0.0);
        // Run n2 steps past the end of the line so the trailing output
        // voxels (which lag the input by n2) are still written.
        for (int x = 1; x <= im.xsize() + n2; x++) {
          if (x > nker) { movsum -= buff[m-1]; den--; }  // oldest sample leaves
          if (x <= im.xsize()) {                         // newest sample enters
            buff[m-1] = im(x-1, y, z);
            movsum += buff[m-1];
            den++;
          }
          if ((x - n2) >= 1) retim(x-n2-1, y, z) = movsum / den;
          m++; if (m > nker) m = 1;
        }
      }
    }
  }
  if (dir == 2) {
    for (int z = im.minz(); z <= im.maxz(); z++) {
      for (int x = im.minx(); x <= im.maxx(); x++) {
        double movsum = 0.0;
        int m = 1, den = 0;  // m: 1-based circular-buffer index; den: samples in window
        std::fill(buff.begin(), buff.end(), 0.0);
        for (int y = 1; y <= im.ysize() + n2; y++) {
          if (y > nker) { movsum -= buff[m-1]; den--; }  // oldest sample leaves
          if (y <= im.ysize()) {                         // newest sample enters
            buff[m-1] = im(x, y-1, z);
            movsum += buff[m-1];
            den++;
          }
          // BUGFIX: guard previously tested (x-n2), a copy-paste error from the
          // dir==1 branch that skipped whole columns for small x and wrote to a
          // negative y index otherwise.
          if ((y - n2) >= 1) retim(x, y-n2-1, z) = movsum / den;
          m++; if (m > nker) m = 1;
        }
      }
    }
  }
  if (dir == 3) {
    for (int y = im.miny(); y <= im.maxy(); y++) {
      for (int x = im.minx(); x <= im.maxx(); x++) {
        double movsum = 0.0;
        int m = 1, den = 0;  // m: 1-based circular-buffer index; den: samples in window
        std::fill(buff.begin(), buff.end(), 0.0);
        for (int z = 1; z <= im.zsize() + n2; z++) {
          if (z > nker) { movsum -= buff[m-1]; den--; }  // oldest sample leaves
          if (z <= im.zsize()) {                         // newest sample enters
            buff[m-1] = im(x, y, z-1);
            movsum += buff[m-1];
            den++;
          }
          // BUGFIX: guard previously tested (x-n2) instead of (z-n2) (same
          // copy-paste error as the dir==2 branch).
          if ((z - n2) >= 1) retim(x, y, z-n2-1) = movsum / den;
          m++; if (m > nker) m = 1;
        }
      }
    }
  }
  return retim;
}
// for example ... print difference of COGs between 2 images ...
int do_work(int argc, char* argv[])
{
volume<float> v1, retv;
read_volume(v1,inname.value());
int dims=2;
int size=3;
if ((mode.value()=="3x3x3") || (mode.value()=="9x9x9")) { dims=3; }
if ((mode.value()=="9x9") || (mode.value()=="9x9x9")) { size=9; }
retv = moving_average(v1,size,1);
retv = moving_average(retv,size,2);
if (dims==3) {
retv = moving_average(retv,size,3);
}
save_volume(retv,outname.value());
return 0;
}
////////////////////////////////////////////////////////////////////////////
int main(int argc,char *argv[])
{
Tracer tr("main");
OptionParser options(title, examples);
try {
// must include all wanted options here (the order determines how
// the help message is printed)
options.add(inname);
options.add(outname);
options.add(mode);
options.add(verbose);
options.add(help);
nonoptarg = options.parse_command_line(argc, argv);
// line below stops the program if the help was requested or
// a compulsory option was not set
if ( (help.value()) || (!options.check_compulsory_arguments(true)) )
{
options.usage();
exit(EXIT_FAILURE);
}
} catch(X_OptionError& e) {
options.usage();
cerr << endl << e.what() << endl;
exit(EXIT_FAILURE);
} catch(std::exception &e) {
cerr << e.what() << endl;
}
// Call the local functions
return do_work(argc,argv);
}
%% Random-forest training via bagged classification trees, with an
%% out-of-bag (OOB) accuracy estimate printed at the end.
%% MUST PASS IN: training, num_features
%%   training     : N-by-(M+1) matrix; first column holds the class labels,
%%                  the remaining M columns the features (labels appear to
%%                  be 0/1 -- see the treeval note below).
%%   num_features : number of randomly chosen features (mm) per tree.
%% disp('Loading data');
%% load Pz1Cn21_alltissue
%% training=[newvecs(:,166) newvecs(:,1:162) newvecs(:,163:165)/100];
% the numbers below are the feature numbers of the central intensity values!
criticalfeatures=[6 33 60 141];
disp('Verifying data');
[N,M1]=size(training);
% training is the training set matrix (first column are labels)
% N is number of training cases
M=M1-1; % M is the number of variables in the classifier (less
% output flag)
% mm is the number to use in one particular decision tree
%% What is a good way of choosing this?!?
%% Loop over mm? %% for mm=round(M/50):round(M/3),
%% mm=M/5;
mm=num_features;
NT=500; % number of trees
%% Also need a way of choosing/optimising this number
disp('Initial setup');
samples=1:N;
% Fit a throwaway tree just to obtain a template struct, so that 'forest'
% can be preallocated as a struct array of the right shape.
examplestruct=struct(treefit(training(:,1:3),training(samples,1),'method', 'classification','splitmin',2));
examplestruct=struct(treeprune(examplestruct,'level',0));
forest(1:NT)=examplestruct;
forestf=zeros(NT,mm);   % feature (column) indices used by each tree
correct=zeros(N,1);     % per-sample count of correct OOB classifications
total=zeros(N,1);       % per-sample count of times the sample was OOB
disp('Starting training');
for nn=1:NT,
% make a sample training set with bagging (size N): draw N row indices
% with replacement
samples=max(ceil(rand(N,1)*N),1);
% work out where the used and unused samples were
unusedsamps=ones(N,1);
unusedsamps(samples)=0;
unusedsamps=logical(unusedsamps);
% select a random set of mm features to use (no repetition)
% NOTE(review): with the critical-feature test commented out below and
% fundfeature set to 1 unconditionally, this while loop always runs
% exactly once -- confirm whether the critical-feature filter was meant
% to be re-enabled.
fundfeature=0;
while (fundfeature==0),
features=randperm(M);
features=features(1:mm)+1;   % +1 skips the label column (column 1)
sumf=0;
%%%% for cf=1:length(criticalfeatures),
%%%% sumf=sumf+sum(features==criticalfeatures(cf));
%%%% end
fundfeature=1;
if (sumf>0), fundfeature=1; end
if (M<2*length(criticalfeatures)), fundfeature=1; end
end
% grow a tree from randomly selected mm features - one per set
forest(nn)=struct(treefit(training(samples,features),training(samples,1),'method','classification','splitmin',2));
forest(nn)=struct(treeprune(forest(nn),'level',0));
forestf(nn,1:mm)=features;
% evaluate the performance on the unused samples (need to
% accumulate results and take the mode later)
% Note: res has -1 at end as treeval gives [1,2] not [0,1]
res=treeval(forest(nn),training(unusedsamps,features))-1;
correct(unusedsamps)=correct(unusedsamps)+(res==training(unusedsamps,1));
total(unusedsamps)=total(unusedsamps)+1;
if (mod(nn,10)==0), disp([num2str(nn/NT*100),' %']); end
end
disp('Finished training');
% final (internally assessed) result for this random forest: mean per-sample
% OOB accuracy. max(total,1) guards against division by zero for samples
% that were never out-of-bag.
mean(correct./max(total,1))
%% save workspace
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment