Commit 7eacf841 authored by Andrei-Claudiu Roibu

updated docstrings, PEP8 formatting, fixed bugs

parent c442542d
"""Brain Mapper U-Net Architecture
Description:
-------------
This folder contains the Pytorch implementation of the core U-net architecture.
This arcitecture predicts functional connectivity rsfMRI from structural connectivity information from dMRI.
Usage
-------------
To use this module, import it and instantiate is as you wish:
This folder contains the Pytorch implementation of the core U-net architecture.
This arcitecture predicts functional connectivity rsfMRI from structural connectivity information from dMRI.
from BrainMapperUNet import BrainMapperUNet
deep_learning_model = BrainMapperUnet(parameters)
Usage:
To use this module, import it and instantiate is as you wish:
from BrainMapperUNet import BrainMapperUNet
deep_learning_model = BrainMapperUnet(parameters)
"""
@@ -19,6 +19,7 @@ import torch
import torch.nn as nn
import utils.modules as modules
class BrainMapperUNet(nn.Module):
"""Architecture class BrainMapper U-net.
@@ -39,14 +40,11 @@ class BrainMapperUNet(nn.Module):
'up_mode': 'upconv'
'number_of_classes': 1
}
Returns:
probability_map (torch.tensor): Output tensor produced by the forward pass through the U-net
Raises:
None
"""
def __init__(self, parameters):
super(BrainMapperUNet, self).__init__()
@@ -80,44 +78,41 @@ class BrainMapperUNet(nn.Module):
Returns:
probability_map (torch.tensor): Output tensor produced by the forward pass through the U-net
Raises:
None
"""
Y_encoder_1, Y_np1, pool_indices1 = self.encoderBlock1.forward(X)
Y_encoder_2, Y_np2, pool_indices2 = self.encoderBlock2.forward(
Y_encoder_1)
Y_encoder_3, Y_np3, pool_indices3 = self.encoderBlock3.forward(
Y_encoder_2)
Y_encoder_4, Y_np4, pool_indices4 = self.encoderBlock4.forward(
Y_encoder_3)
Y_bottleNeck = self.bottleneck.forward(Y_encoder_4)
Y_decoder_1 = self.decoderBlock1.forward(
Y_bottleNeck, Y_np4, pool_indices4)
Y_decoder_2 = self.decoderBlock2.forward(
Y_decoder_1, Y_np3, pool_indices3)
Y_decoder_3 = self.decoderBlock3.forward(
Y_decoder_2, Y_np2, pool_indices2)
Y_decoder_4 = self.decoderBlock4.forward(
Y_decoder_3, Y_np1, pool_indices1)
probability_map = self.classifier.forward(Y_decoder_4)
return probability_map
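# Forward-pass sketch (assumptions: 2D single-channel input whose spatial
# dimensions survive four 2x2 poolings, e.g. 64x64; `model` is an instance
# built as in the module-level usage sketch above):
#
#   X = torch.randn(1, 1, 64, 64)
#   probability_map = model.forward(X)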
def save(self, path):
"""Model Saver
Function saving the model with all its parameters to a given path.
The path must end with a '*.model' extension.
Args:
path (str): Path string
Returns:
None
Raises:
None
"""
print("Saving Model... {}".format(path))
torch.save(self, path)
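# Save/load sketch: torch.save(self, path) pickles the entire module, so the
# counterpart is a plain torch.load (the class definition must be importable
# when loading; the path below is illustrative):
#
#   model.save('experiments/BrainMapper.model')
#   model = torch.load('experiments/BrainMapper.model')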
@@ -127,21 +122,15 @@ class BrainMapperUNet(nn.Module):
This function tests if the model parameters are allocated to a CUDA enabled GPU.
Args:
None
Returns:
bool: Flag which is True if the model parameters are stored on the GPU and False otherwise
Raises:
None
"""
return next(self.parameters()).is_cuda
def predict(self, X, device=0):
"""Post-training Output Prediction
This function predicts the output of the U-net post-training.
Args:
@@ -151,11 +140,8 @@ class BrainMapperUNet(nn.Module):
Returns:
prediction (ndarray): predicted output after training
Raises:
None
"""
self.eval()  # PyTorch module method which sets the network to evaluation mode
if type(X) is np.ndarray:
X = torch.tensor(X, requires_grad=False).type(torch.FloatTensor)
......@@ -165,13 +151,14 @@ class BrainMapperUNet(nn.Module):
# The .cuda() call transfers the tensor from the CPU to the GPU if that is the case.
# The non-blocking argument lets the caller bypass synchronization when necessary.
with torch.no_grad():  # disables gradient computation for the enclosed operations
output = self.forward(X)
_, idx = torch.max(output, 1)
# Retrieve the tensor held by idx (.data) and move it to the CPU as a NumPy ndarray
idx = idx.data.cpu().numpy()
prediction = np.squeeze(idx)
del X, output, idx
"""Brain Mapper Run File
Description:
-------------
This file contains all the relevant functions for running BrainMapper.
The network can be ran in one of these modes:
- train
- evaluate path
- evaluate whole
TODO: Might be worth adding some information on uncertaintiy estimation, later down the line
This file contains all the relevant functions for running BrainMapper.
The network can be ran in one of these modes:
- train
- evaluate path
- evaluate whole
TODO: Might be worth adding some information on uncertaintiy estimation, later down the line
Usage
-------------
In order to run the network, in the terminal, the user needs to pass it relevant arguments:
- (TODO: ADD ARGUMENTS)
Usage:
In order to run the network, in the terminal, the user needs to pass it relevant arguments:
$ ./setup.sh
$ source env/bin/activate
$ python run.py --mode ...
The arguments for mode are the following:
mode=train # For training the model
mode=evaluate-score # For evaluating the model score
mode=evaluate-mapping # For evaluating the model mapping
mode=clear-experiment # For clearning the experiments and logs directories of the last experiment
mode=clear-all # For clearing all the files from the experiments and logs directories/
"""
import os
import shutil
import argparse
import logging

import torch
import torch.utils.data as data

from settings import Settings
from solver import Solver
from BrainMapperUNet import BrainMapperUNet
from utils.data_utils import get_datasets
import utils.data_evaluation_utils as evaluations
from utils.data_logging_utils import LogWriter
# Set the default floating point tensor type to FloatTensor
torch.set_default_tensor_type(torch.FloatTensor)
def load_data(data_parameters):
"""Dataset Loader
@@ -43,21 +56,11 @@ def load_data(data_parameters):
Args:
data_parameters (dict): Dictionary containing relevant information for the datafiles.
data_parameters = {
'data_directory': 'path/to/directory'
'train_data_file': 'training_data'
'train_output_targets': 'training_targets'
'test_data_file': 'testing_data'
'test_target_file': 'testing_targets'
}
Returns:
train_data (dataset object): Pytorch map-style dataset object, mapping indices to training data samples.
test_data (dataset object): Pytorch map-style dataset object, mapping indices to testing data samples.
Raises:
None
"""
print("Data is loading...")
train_data, test_data = get_datasets(data_parameters)
@@ -67,9 +70,10 @@ def load_data(data_parameters):
return train_data, test_data
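# Usage sketch (keys copied from the docstring above; the paths and file
# names are illustrative):
#
#   data_parameters = {'data_directory': 'path/to/directory',
#                      'train_data_file': 'training_data',
#                      'train_output_targets': 'training_targets',
#                      'test_data_file': 'testing_data',
#                      'test_target_file': 'testing_targets'}
#   train_data, test_data = load_data(data_parameters)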
def train(data_parameters, training_parameters, network_parameters, misc_parameters):
"""Training Function
This function trains a given model using the provided training data.
Currently, the data loader is set to use multiple sub-processes.
A high enough number of workers ensures that CPU computations are efficiently managed, i.e. that the bottleneck is indeed the neural network's forward and backward operations on the GPU (and not data generation).
@@ -78,13 +82,6 @@ def train(data_parameters, training_parameters, network_parameters, misc_parameters):
Args:
data_parameters (dict): Dictionary containing relevant information for the datafiles.
data_parameters = {
'data_directory': 'path/to/directory'
'train_data_file': 'training_data'
'train_output_targets': 'training_targets'
'test_data_file': 'testing_data'
'test_target_file': 'testing_targets'
}
training_parameters (dict): Dictionary containing relevant hyperparameters for training the network.
training_parameters = {
@@ -106,19 +103,6 @@ def train(data_parameters, training_parameters, network_parameters, misc_parameters):
}
network_parameters (dict): Contains the relevant network parameters
network_parameters = {
'kernel_heigth': 5
'kernel_width': 5
'kernel_classification': 1
'input_channels': 1
'output_channels': 64
'convolution_stride': 1
'dropout': 0.2
'pool_kernel_size': 2
'pool_stride': 2
'up_mode': 'upconv'
'number_of_classes': 1
}
misc_parameters (dict): Dictionary of additional hyperparameters
misc_parameters = {
@@ -128,30 +112,24 @@ def train(data_parameters, training_parameters, network_parameters, misc_parameters):
'device': 1
'experiments_directory': 'experiments-directory'
}
Returns:
None
Raises:
None
"""
train_data, test_data = load_data(data_parameters)
train_loader = data.DataLoader(
dataset=train_data,
batch_size=training_parameters['training_batch_size'],
shuffle=True,
num_workers=4,
pin_memory=True
)
test_loader = data.DataLoader(
dataset=test_data,
batch_size=training_parameters['test_batch_size'],
shuffle=False,
num_workers=4,
pin_memory=True
)
if training_parameters['use_pre_trained']:
@@ -159,85 +137,54 @@ def train(data_parameters, training_parameters, network_parameters, misc_parameters):
else:
BrainMapperModel = BrainMapperUNet(network_parameters)
solver = Solver(model=BrainMapperModel,
device=misc_parameters['device'],
number_of_classes=network_parameters['number_of_classes'],
experiment_name=training_parameters['experiment_name'],
optimizer_arguments={'lr': training_parameters['learning_rate'],
'betas': training_parameters['optimizer_beta'],
'eps': training_parameters['optimizer_epsilon'],
'weight_decay': training_parameters['optimizer_weigth_decay']
},
model_name=misc_parameters['model_name'],
number_epochs=training_parameters['number_of_epochs'],
loss_log_period=training_parameters['loss_log_period'],
learning_rate_scheduler_step_size=training_parameters[
'learning_rate_scheduler_step_size'],
learning_rate_scheduler_gamma=training_parameters['learning_rate_scheduler_gamma'],
use_last_checkpoint=training_parameters['use_last_checkpoint'],
experiment_directory=misc_parameters['experiments_directory'],
logs_directory=misc_parameters['logs_directory']
)
solver.train(train_loader, test_loader)
model_output_path = os.path.join(
misc_parameters['save_model_directory'], training_parameters['final_model_output_file'])
BrainMapperModel.save(model_output_path)
print("Final Model Saved in: {}".format(model_output_path))
def evaluate_score(training_parameters, network_parameters, misc_parameters, evaluation_parameters):
"""Mapping Score Evaluator
This function evaluates a given trained model by calculating the Dice score of its predictions.
Args:
training_parameters (dict): Dictionary containing relevant hyperparameters for training the network.
training_parameters = {
'training_batch_size': 5
'test_batch_size': 5
'use_pre_trained': False
'pre_trained_path': 'pre_trained/path'
'experiment_name': 'experiment_name'
'learning_rate': 1e-4
'optimizer_beta': (0.9, 0.999)
'optimizer_epsilon': 1e-8
'optimizer_weigth_decay': 1e-5
'number_of_epochs': 10
'loss_log_period': 50
'learning_rate_scheduler_step_size': 3
'learning_rate_scheduler_gamma': 1e-1
'use_last_checkpoint': True
'final_model_output_file': 'path/to/model'
}
network_parameters (dict): Contains the relevant network parameters
network_parameters = {
'kernel_heigth': 5
'kernel_width': 5
'kernel_classification': 1
'input_channels': 1
'output_channels': 64
'convolution_stride': 1
'dropout': 0.2
'pool_kernel_size': 2
'pool_stride': 2
'up_mode': 'upconv'
'number_of_classes': 1
}
misc_parameters (dict): Dictionary of additional hyperparameters
misc_parameters = {
'save_model_directory': 'directory_name'
'model_name': 'BrainMapper'
'logs_directory': 'log-directory'
'device': 1
'experiments_directory': 'experiments-directory'
@@ -252,37 +199,33 @@ def evaluate_score(data_parameters, training_parameters, network_parameters, misc_parameters, evaluation_parameters):
'orientation': 'coronal'
'saved_predictions_directory': 'directory-of-saved-predictions'
}
Returns:
None
Raises:
None
"""
logWriter = LogWriter(number_of_classes=network_parameters['number_of_classes'],
logs_directory=misc_parameters['logs_directory'],
experiment_name=training_parameters['experiment_name']
)
prediction_output_path = os.path.join(misc_parameters['experiments_directory'],
training_parameters['experiment_name'],
evaluation_parameters['saved_predictions_directory']
)
average_dice_score = evaluations.evaluate_dice_score(trained_model_path=evaluation_parameters['trained_model_path'],
number_of_classes=network_parameters['number_of_classes'],
data_directory=evaluation_parameters['data_directory'],
targets_directory=evaluation_parameters[
'targets_directory'],
data_list=evaluation_parameters['data_list'],
orientation=evaluation_parameters['orientation'],
prediction_output_path=prediction_output_path,
device=misc_parameters['device'],
LogWriter=logWriter
)
logWriter.close()
def evaluate_mapping(mapping_evaluation_parameters):
"""Mapping Evaluator
@@ -314,11 +257,6 @@ def evaluate_mapping(mapping_evaluation_parameters):
'number_of_paths': 3
}
Returns:
None
Raises:
None
"""
trained_model1_path = mapping_evaluation_parameters['trained_model1_path']
trained_model2_path = mapping_evaluation_parameters['trained_model2_path']
......@@ -327,62 +265,62 @@ def evaluate_mapping(mapping_evaluation_parameters):
data_list = mapping_evaluation_parameters['data_list']
orientation1 = mapping_evaluation_parameters['orientation1']
orientation2 = mapping_evaluation_parameters['orientation2']
orientation3 = mapping_evaluation_parameters['orientation3']
prediction_output_path = mapping_evaluation_parameters['prediction_output_path']
batch_size = mapping_evaluation_parameters['batch_size']
device = mapping_evaluation_parameters['device']
exit_on_error = mapping_evaluation_parameters['exit_on_error']
if mapping_evaluation_parameters['number_of_paths'] == 1:
evaluations.evaluate_single_path(trained_model1_path,
data_directory,
data_list,
orientation1,
prediction_output_path,
batch_size,
device=device,
exit_on_error=exit_on_error)
elif mapping_evaluation_parameters['number_of_paths'] == 2:
evaluations.evaluate_two_paths(trained_model1_path,
trained_model2_path,
data_directory,
data_list,
orientation1,
orientation2,
prediction_output_path,
batch_size,
device=device,
exit_on_error=exit_on_error)
elif mapping_evaluation_parameters['number_of_paths'] == 3:
evaluations.evaluate_all_paths(trained_model1_path,
trained_model2_path,
trained_model3_path,
data_directory,
data_list,
orientation1,
orientation2,
orientation3,
prediction_output_path,
batch_size,
device=device,
exit_on_error=exit_on_error)
def delete_files(folder):
""" Clear Folder Contents
Function which clears contents (like experiments or logs)
Args:
folder (str): Name of folders whose conents is to be deleted
Returns:
None
Raises:
Exception: Any error
"""
for object_name in os.listdir(folder):
file_path = os.path.join(folder, object_name)
try:
@@ -396,8 +334,10 @@ def delete_files():
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--mode', '-m', required=True,
help='run mode, valid values are train or evaluate')
parser.add_argument('--settings_path', '-sp', required=False,
help='optional argument, set path to settings_evaluation.ini')
arguments = parser.parse_args()
@@ -409,9 +349,11 @@ if __name__ == '__main__':
evaluation_parameters = settings['EVALUATION']
if arguments.mode == 'train':
train(data_parameters, training_parameters,
network_parameters, misc_parameters)
elif arguments.mode == 'evaluate-score':
evaluate_score(training_parameters,
network_parameters, misc_parameters, evaluation_parameters)