Commit e3f6a179 authored by Andrei-Claudiu Roibu's avatar Andrei-Claudiu Roibu 🖥
Browse files

debugged eval-mapper, added del for past checkpoints, fixed bugs

parent 9e8bca8f
......@@ -289,81 +289,37 @@ def evaluate_mapping(mapping_evaluation_parameters):
"""Mapping Evaluator
This function passes through the network an input and generates the rsfMRI outputs.
This function allows the user to either use one or two or three paths.
The convention for the different model paths is as follows:
- model1: coronal
- model2: axial
- model3: saggital
However, this convention can be changed either below or in the settings file.
Args:
mapping_evaluation_parameters (dict): Dictionary of parameters useful during mapping evaluation.
mapping_evaluation_parameters = {
'trained_model1_path': 'path/to/model1'
'trained_model2_path': 'path/to/model2'
'trained_model3_path': 'path/to/model3'
'trained_model_path': 'path/to/model'
'data_directory': 'path/to/data'
'mapping_data_file': 'path/to/file'
'data_list': 'path/to/datalist.txt/
'orientation1': 'coronal'
'orientation2': 'axial'
'orientation3': 'sagittal'
'prediction_output_path': 'directory-of-saved-predictions'
'batch_size': 2
'device': 0
'exit_on_error': True
'number_of_paths': 3
}
"""
trained_model1_path = mapping_evaluation_parameters['trained_model1_path']
trained_model2_path = mapping_evaluation_parameters['trained_model2_path']
trained_model3_path = mapping_evaluation_parameters['trained_model3_path']
trained_model_path = mapping_evaluation_parameters['trained_model_path']
data_directory = mapping_evaluation_parameters['data_directory']
mapping_data_file = mapping_evaluation_parameters['mapping_data_file']
data_list = mapping_evaluation_parameters['data_list']
orientation1 = mapping_evaluation_parameters['orientation1']
orientation2 = mapping_evaluation_parameters['orientation2']
orientation3 = mapping_evaluation_parameters['orientation3']
prediction_output_path = mapping_evaluation_parameters['prediction_output_path']
batch_size = mapping_evaluation_parameters['batch_size']
device = mapping_evaluation_parameters['device']
exit_on_error = mapping_evaluation_parameters['exit_on_error']
if mapping_evaluation_parameters['number_of_paths'] == 1:
evaluations.evaluate_single_path(trained_model1_path,
data_directory,
data_list,
orientation1,
prediction_output_path,
batch_size,
device=device,
exit_on_error=exit_on_error)
elif mapping_evaluation_parameters['number_of_paths'] == 2:
evaluations.evaluate_two_paths(trained_model1_path,
trained_model2_path,
data_directory,
data_list,
orientation1,
orientation2,
prediction_output_path,
batch_size,
device=device,
exit_on_error=exit_on_error)
elif mapping_evaluation_parameters['number_of_paths'] == 3:
evaluations.evaluate_all_paths(trained_model1_path,
trained_model2_path,
trained_model3_path,
data_directory,
data_list,
orientation1,
orientation2,
orientation3,
prediction_output_path,
batch_size,
device=device,
exit_on_error=exit_on_error)
evaluations.evaluate_mapping(trained_model_path,
data_directory,
mapping_data_file,
data_list,
prediction_output_path,
batch_size,
device=device,
def delete_files(folder):
""" Clear Folder Contents
......@@ -373,11 +329,6 @@ def delete_files(folder):
Args:
folder (str): Name of the folder whose contents are to be deleted
Returns:
None
Raises:
Exception: Any error
"""
for object_name in os.listdir(folder):
......@@ -432,6 +383,7 @@ if __name__ == '__main__':
network_parameters, misc_parameters)
# NOTE: THE EVAL FUNCTIONS HAVE NOT YET BEEN DEBUGGED (16/04/20)
# NOTE: THE EVAL-MAPPING FUNCTION HAS BEEN DEBUGGED (28/04/20)
elif arguments.mode == 'evaluate-score':
evaluate_score(training_parameters,
......
[MAPPING]
trained_model1_path = "path/to/model1"
trained_model2_path = "path/to/model2"
trained_model3_path = "path/to/model3"
data_directory = "path/to/data"
data_list = "path/to/datalist.txt"
orientation1 = "coronal"
orientation2 = "axial"
orientation3 = "sagittal"
prediction_output_path = "directory-of-saved-predictions"
batch_size = 2
trained_model_path = "saved_models/finetuned_alldata.pth.tar"
data_directory = "/well/win-biobank/projects/imaging/data/data3/subjectsAll/"
mapping_data_file = "dMRI/autoptx_preproc/tractsNormSummed.nii.gz"
data_list = "datasets/test.txt"
prediction_output_path = "network_predictions"
batch_size = 1
device = 0
exit_on_error = True
number_of_paths = 3
\ No newline at end of file
exit_on_error = True
\ No newline at end of file
......@@ -173,6 +173,10 @@ class Solver():
for batch_index, sampled_batch in enumerate(dataloaders[phase]):
X = sampled_batch[0].type(torch.FloatTensor)
# X = ( X - X.min() ) / ( X.max() - X.min() )
# X = ( X - X.mean() ) / X.std()
y = sampled_batch[1].type(torch.FloatTensor)
# We add an extra dimension (~ number of channels) for the 3D convolutions.
......@@ -232,6 +236,8 @@ class Solver():
filename=os.path.join(self.experiment_directory_path, self.checkpoint_directory,
'checkpoint_epoch_' + str(epoch) + '.' + checkpoint_extension)
)
if epoch != self.start_epoch:
os.remove(os.path.join(self.experiment_directory_path, self.checkpoint_directory, 'checkpoint_epoch_' + str(epoch-1) + '.' + checkpoint_extension))
if phase == 'train':
learning_rate_scheduler.step()
......
......@@ -200,6 +200,167 @@ def evaluate_dice_score(trained_model_path,
return average_dice_score
def evaluate_mapping(trained_model_path,
                     data_directory,
                     mapping_data_file,
                     data_list,
                     prediction_output_path,
                     batch_size,
                     device=0,
                     mode='evaluate',
                     exit_on_error=False):
    """Model Evaluator

    This function generates the rsfMRI map for an input running on a single axis or path.

    Args:
        trained_model_path (str): Path to the location of the trained model
        data_directory (str): Path to input data directory
        mapping_data_file (str): Path to the input file
        data_list (str): Path to a .txt file containing the input files for consideration
        prediction_output_path (str): Output prediction path
        batch_size (int): Size of batch to be evaluated
        device (str/int): Device type used for training (int - GPU id, str - CPU)
        mode (str): Current run mode or phase
        exit_on_error (bool): Flag that triggers the raising of an exception

    Raises:
        FileNotFoundError: Error in reading the provided file!
        Exception: Error code execution!
    """
    log.info(
        "Started Evaluation. Check tensorboard for plots (if a LogWriter is provided)")

    with open(data_list) as data_list_file:
        volumes_to_be_used = data_list_file.read().splitlines()

    # Test if cuda is available and attempt to run on GPU
    cuda_available = torch.cuda.is_available()
    if isinstance(device, int):
        if cuda_available:
            model = torch.load(trained_model_path)
            torch.cuda.empty_cache()
            model.cuda(device)
        else:
            log.warning(
                "CUDA not available. Switching to CPU. Investigate behaviour!")
            device = 'cpu'

    if isinstance(device, str) or not cuda_available:
        model = torch.load(trained_model_path,
                           map_location=torch.device(device))

    model.eval()

    # Create the prediction path folder if this is not available
    data_utils.create_folder(prediction_output_path)

    # Initiate the evaluation
    log.info("rsfMRI Generation Started")

    file_paths = data_utils.load_file_paths(data_directory, data_list, mapping_data_file)

    with torch.no_grad():
        for volume_index, file_path in enumerate(file_paths):
            try:
                # Generate volume, header and affine transform.
                # (A header_affines array was previously built here from the
                # srow_* header fields but was never used; it has been removed.)
                _, predicted_volume, header, xform = _generate_volume_map(
                    file_path, model, batch_size, device, cuda_available)

                output_nifti_image = Image(
                    predicted_volume, header=header, xform=xform)

                output_nifti_path = os.path.join(
                    prediction_output_path, volumes_to_be_used[volume_index])

                # Ensure the saved prediction carries a NIfTI extension.
                if '.nii' not in output_nifti_path:
                    output_nifti_path += '.nii.gz'

                output_nifti_image.save(output_nifti_path)

                log.info("Processed: " + volumes_to_be_used[volume_index] + " " + str(
                    volume_index + 1) + " out of " + str(len(volumes_to_be_used)))

            except FileNotFoundError as exception_expression:
                log.error("Error in reading the provided file!")
                log.exception(exception_expression)
                if exit_on_error:
                    # Bare raise preserves the original traceback.
                    raise

            except Exception as exception_expression:
                log.error("Error code execution!")
                log.exception(exception_expression)
                if exit_on_error:
                    raise

    log.info("rsfMRI Generation Complete")
def _generate_volume_map(file_path, model, batch_size, device, cuda_available):
    """rsfMRI Volume Generator

    This function uses the trained model to generate a new volume.

    Args:
        file_path (str): Path to the desired file
        model (class): BrainMapper model class
        batch_size (int): Size of batch to be evaluated
        device (str/int): Device type used for training (int - GPU id, str - CPU)
        cuda_available (bool): Flag indicating if a cuda-enabled GPU is present

    Returns:
        output_volume (torch.Tensor): Raw network output for the whole volume
        predicted_volume (np.array): Array containing the information regarding the generated volume
        header (class): 'nibabel.nifti1.Nifti1Header' class object, containing volume metadata
        xform (np.array): Affine transform of the loaded image
    """
    volume, header, xform = data_utils.load_and_preprocess_evaluation(file_path)

    # Add batch and channel dimensions for the 3D convolutions when the
    # loaded volume is a plain 3D array (4D volumes are used as-is).
    if len(volume.shape) != 4:
        volume = volume[np.newaxis, np.newaxis, :, :, :]

    volume = torch.tensor(volume).type(torch.FloatTensor)

    output_volume = []
    for batch_start in range(0, len(volume), batch_size):
        batch_x = volume[batch_start: batch_start + batch_size]
        if cuda_available and isinstance(device, int):
            batch_x = batch_x.cuda(device)
        output_volume.append(model(batch_x))

    output_volume = torch.cat(output_volume)

    # For functional data we keep the raw network activations rather than
    # applying torch.max (which would be appropriate for segmentations,
    # where only the arg-max locations matter). The value strength here
    # represents the strength of the activation; a threshold might also
    # be required downstream.
    predicted_volume = output_volume.cpu().numpy().astype('float32')
    predicted_volume = np.squeeze(predicted_volume)

    return output_volume, predicted_volume, header, xform
# DEPRECATED FUNCTIONS
def evaluate_single_path(trained_model_path,
data_directory,
data_list,
......@@ -591,7 +752,7 @@ def _generate_volume(file_path, model, orientation, batch_size, device, cuda_ava
header (class): 'nibabel.nifti1.Nifti1Header' class object, containing volume metadata
"""
volume, header = data_utils.load_and_preprocess_evaluation(
volume, header = data_utils.load_and_preprocess_evaluation2D(
file_path, orientation)
if len(volume.shape) == 4:
......@@ -635,4 +796,4 @@ def _generate_volume(file_path, model, orientation, batch_size, device, cuda_ava
predicted_volume = predicted_volume
output_volume = output_volume
return output_volume, predicted_volume, header
return output_volume, predicted_volume, header
\ No newline at end of file
......@@ -237,6 +237,10 @@ class DataMapper(data.Dataset):
"""
volume_resampled, _ = resampleToPixdims(
self.read_data_files(path), (2, 2, 2))
# volume_resampled = (volume_resampled - np.min(volume_resampled)) / (np.max(volume_resampled) - np.min(volume_resampled))
# volume_resampled = (volume_resampled - np.mean(volume_resampled)) / np.std(volume_resampled)
return volume_resampled
def read_data_files(self, path):
......@@ -321,7 +325,7 @@ def create_folder(path):
os.mkdir(path)
def load_file_paths(data_directory, data_list, targets_directory=None):
def load_file_paths(data_directory, data_list, mapping_data_file, targets_directory=None, target_file=None):
"""File Loader
This function returns a list of combined file paths for the input data and labelled output data.
......@@ -329,6 +333,7 @@ def load_file_paths(data_directory, data_list, targets_directory=None):
Args:
data_directory (str): Path to input data directory
data_list (str): Path to a .txt file containing the input files for consideration
mapping_data_file (str): Path to the input files
targets_directory (str): Path to labelled data (Y-equivalent); None if during evaluation.
Returns:
......@@ -344,12 +349,12 @@ def load_file_paths(data_directory, data_list, targets_directory=None):
else:
volumes_to_be_used = [files for files in os.listdir(data_directory)]
if targets_directory == None:
file_paths = [[os.path.join(data_directory, volume)]
if targets_directory == None or target_file == None:
file_paths = [[os.path.join(data_directory, volume, mapping_data_file)]
for volume in volumes_to_be_used]
else:
file_paths = [[os.path.join(data_directory, volume), os.join.path(
targets_directory, volume)] for volume in volumes_to_be_used]
file_paths = [[os.path.join(data_directory, volume, mapping_data_file), os.join.path(
targets_directory, volume, )] for volume in volumes_to_be_used]
return file_paths
......@@ -401,19 +406,6 @@ def load(file_path, orientation):
return volume, label_map, nifty_volume.header
def preprocess():
    """Data pre-processing hook.

    Placeholder: no specific pre-processing is currently required, so this
    simply returns None. (Idea: the summed-tracts function might be worth
    adding either here or to the preprocessor file.)
    """
    return None
def set_orientation(volume, label_map, orientation):
"""Load Data Orientation
......@@ -446,15 +438,14 @@ def set_orientation(volume, label_map, orientation):
"Orientation value is invalid. It must be either >>coronal<<, >>axial<< or >>sagital<< ")
def load_and_preprocess_evaluation(file_path, min_max=False):
    """Load & Preprocessing before evaluation

    This function loads a nifty file and returns its resampled volume,
    header information and affine transform.

    Args:
        file_path (str): Path to the desired file
        min_max (bool): Flag for inducing min-max normalization of the volume; default = False

    Returns:
        volume (np.array): Array of training image data of data type dtype.
        header (class): 'nibabel.nifti1.Nifti1Header' class object, containing image metadata
        xform (np.array): Affine transform of the resampled image
    """
    original_image = Image(file_path[0])
    # Resample the image to a 2mm isotropic grid before evaluation.
    volume, xform = resampleToPixdims(original_image, (2, 2, 2))
    header = Image(volume, header=original_image.header, xform=xform).header

    # Plain truthiness test replaces the previous `== True` / `elif == False`
    # pair, which silently skipped both branches for non-bool values; a
    # stray `pass` and a dead duplicate return have also been removed.
    if min_max:
        volume = (volume - np.min(volume)) / (np.max(volume) - np.min(volume))
    else:
        volume = np.round(volume)

    return volume, header, xform
# Deprecated Functions & Classes & Methods:
def tract_sum_generator(folder_path):
"""Sums the tracts of different dMRI files
......@@ -621,3 +606,44 @@ def get_datasetsHDF5(data_parameters):
training_labels['label'][()]),
DataMapperHDF5(testing_data['data'][()], testing_labels['label'][()])
)
def load_and_preprocess_evaluation2D(file_path, orientation, min_max=True):
    """Load & Preprocessing before evaluation (2D views)

    This function loads a nifty file and returns its volume, re-oriented
    for the requested view, together with its header information.

    Args:
        file_path (str): Path to the desired file
        orientation (str): String detailing the current view (COR, SAG, AXL)
        min_max (bool): Flag for inducing min-max normalization of the volume

    Returns:
        volume (np.array): Array of training image data of data type dtype.
        header (class): 'nibabel.nifti1.Nifti1Header' class object, containing image metadata

    Raises:
        ValueError: "Orientation value is invalid. It must be either >>coronal<<, >>axial<< or >>sagital<< "
    """
    nifty_volume = nib.load(file_path[0])
    volume = nifty_volume.get_fdata()
    header = nifty_volume.header

    if min_max:
        volume = (volume - np.min(volume)) / (np.max(volume) - np.min(volume))
    else:
        volume = np.round(volume)

    # BUG FIX: the orientation branches previously returned only `volume`,
    # making the final `return volume, header` unreachable, while callers
    # (e.g. _generate_volume) unpack two values. The branches now re-orient
    # in place and fall through to the single two-value return.
    if orientation == "sagittal":
        pass  # This is assumed to be the default orientation
    elif orientation == "axial":
        volume = volume.transpose((1, 2, 0))
    elif orientation == "coronal":
        volume = volume.transpose((2, 0, 1))
    else:
        raise ValueError(
            "Orientation value is invalid. It must be either >>coronal<<, >>axial<< or >>sagital<< ")

    return volume, header
\ No newline at end of file
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment