Commit 76239fd8 authored by Andrei-Claudiu Roibu

pep8 code reformatting

parent 21bbaae0
@@ -24,6 +24,7 @@ import configparser
from fsl.data.image import Image
from fsl.utils.image.resample import resampleToPixdims
def directory_reader(folder_location, write_txt=False):
"""Produces a list of of data-tags which are accessible
@@ -52,12 +53,13 @@ def directory_reader(folder_location, write_txt=False):
return subDirectoryList
def data_test_train_validation_split(folder_location, train_percentage, validation_percentage):
"""Produces lists of train, test and validation data
This function looks at the list of all available directories and returns three lists of sub-directories.
These are the lists required for training, testing and validation.
Args:
folder_location (str): A string containing the address of the required directory.
train_percentage (int): Percentage of data to be used for training
@@ -71,12 +73,14 @@ def data_test_train_validation_split(folder_location, train_percentage, validation_percentage):
subDirectoryList = np.array(subDirectoryList)
train, validation, test = np.split(subDirectoryList, [int(train_percentage/100 * len(subDirectoryList)), int((train_percentage+validation_percentage)/100 * len(subDirectoryList))])
train, validation, test = np.split(subDirectoryList, [int(train_percentage/100 * len(
subDirectoryList)), int((train_percentage+validation_percentage)/100 * len(subDirectoryList))])
np.savetxt('train.txt', train, fmt='%s')
np.savetxt('test.txt', test, fmt='%s')
np.savetxt('validation.txt', validation, fmt='%s')
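
As a note on the split logic above: np.split cuts the array of subject directories at two cumulative indices, so the three pieces hold roughly the train, validation and remaining test share of the entries. A minimal sketch with hypothetical subject names:

```python
import numpy as np

# Hypothetical subject list; in the real code this comes from directory_reader.
subDirectoryList = np.array(['sub-01', 'sub-02', 'sub-03', 'sub-04', 'sub-05',
                             'sub-06', 'sub-07', 'sub-08', 'sub-09', 'sub-10'])
train_percentage, validation_percentage = 80, 10

# np.split slices at the cumulative cut points: [0:8], [8:9], [9:10].
cut_points = [int(train_percentage / 100 * len(subDirectoryList)),
              int((train_percentage + validation_percentage) / 100 * len(subDirectoryList))]
train, validation, test = np.split(subDirectoryList, cut_points)

print(len(train), len(validation), len(test))  # 8 1 1
```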
def update_shuffling_flag(file_name):
""" Update shuffling flag
@@ -92,6 +96,7 @@ def update_shuffling_flag(file_name):
with open(file_name, 'w') as configfile:
config.write(configfile)
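
For context, the flag is persisted with Python's standard configparser; a small sketch of the read-modify-write pattern (the actual section and option names are not visible in this hunk, so the ones below are placeholders):

```python
import configparser

config = configparser.ConfigParser()
config.read('settings.ini')                 # load the existing file, if any
config['DATA'] = {'data_shuffled': 'True'}  # placeholder section and option names
with open('settings.ini', 'w') as configfile:
    config.write(configfile)                # same write pattern as above
```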
class DataMapper(data.Dataset):
"""Data Mapper Class
@@ -124,37 +129,38 @@ class DataMapper(data.Dataset):
self.sample_pairs = []
self._get_datasets()
def __len__ (self):
def __len__(self):
return len(self.sample_pairs)
def __getitem__(self, index):
X_path, y_path = self.sample_pairs[index]
X_volume = torch.from_numpy(self.resample(X_path))
y_volume = torch.from_numpy(self.convert_to_numpy(y_path)[:,:,:,0])
return X_volume, y_volume
y_volume = torch.from_numpy(self.convert_to_numpy(y_path)[:, :, :, 0])
return X_volume, y_volume
def _get_datasets(self):
"""File path generator
Helper function which reads the list of subject sub-directories and generates the required input and target file paths.
"""
with open(self.filename) as files:
lines = files.read().split('\n')
lines = files.read().split('\n')
for line in lines:
if line == '':
pass
else:
X_path = os.path.join(self.data_directory, line, self.data_file)
y_path = os.path.join(self.data_directory, line, self.output_targets)
X_path = os.path.join(
self.data_directory, line, self.data_file)
y_path = os.path.join(
self.data_directory, line, self.output_targets)
self.sample_pairs.append((X_path, y_path))
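
Since DataMapper implements __len__ and __getitem__, it satisfies the torch.utils.data.Dataset protocol and can be fed to a DataLoader. A hedged usage sketch; the paths and file names are placeholders, and the argument order follows the calls in get_datasets further down:

```python
from torch.utils.data import DataLoader

# Placeholder arguments; the real values come from data_parameters in get_datasets.
train_data = DataMapper('train.txt', '/path/to/subjects',
                        'dMRI_volume.nii.gz', 'target_map.nii.gz')

# batch_size=1 avoids collation issues if subject volumes differ in shape.
train_loader = DataLoader(train_data, batch_size=1, shuffle=True, num_workers=2)

for X_volume, y_volume in train_loader:
    print(X_volume.shape, y_volume.shape)  # resampled dMRI volume, first target map
    break
```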
def resample(self, path):
"""dMRI Resample
@@ -168,9 +174,10 @@ class DataMapper(data.Dataset):
Returns:
volume_resampled (np.array): Resampled volume
"""
volume_resampled, _ = resampleToPixdims(self.read_data_files(path), (2,2,2))
volume_resampled, _ = resampleToPixdims(
self.read_data_files(path), (2, 2, 2))
return volume_resampled
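
resampleToPixdims comes from fslpy and returns the resampled data together with the updated affine, which is why the second element is discarded above. A small sketch with a placeholder file path:

```python
from fsl.data.image import Image
from fsl.utils.image.resample import resampleToPixdims

volume = Image('/path/to/subject/dMRI_volume.nii.gz')    # placeholder path
resampled, xform = resampleToPixdims(volume, (2, 2, 2))  # 2 mm isotropic voxels
print(resampled.shape)                                   # NumPy array at the new resolution
```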
def read_data_files(self, path):
"""Volume Reader
@@ -185,7 +192,7 @@ class DataMapper(data.Dataset):
volume_image = Image(path)
return volume_image
def convert_to_numpy(self, path):
"""Numpy wrapper
@@ -201,6 +208,7 @@ class DataMapper(data.Dataset):
volume_numpy = self.read_data_files(path).data
return volume_numpy
def get_datasets(data_parameters):
"""Data Loader Function.
@@ -232,8 +240,10 @@ def get_datasets(data_parameters):
validation_output_targets = data_parameters['validation_target_file']
return (
DataMapper(train_filename, data_directory, train_data_file, train_output_targets),
DataMapper(validation_filename, data_directory, validation_data_file, validation_output_targets)
DataMapper(train_filename, data_directory,
train_data_file, train_output_targets),
DataMapper(validation_filename, data_directory,
validation_data_file, validation_output_targets)
)
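
So the function hands back the training and validation sets as a pair of DataMapper objects; a short usage sketch, assuming data_parameters already contains the keys read in the elided part of the function:

```python
# data_parameters is assumed to hold the file and directory settings consumed
# in the elided portion of get_datasets above.
train_dataset, validation_dataset = get_datasets(data_parameters)
print(len(train_dataset), len(validation_dataset))  # number of (input, target) pairs
```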
@@ -417,6 +427,7 @@ def load_and_preprocess_evaluation(file_path, orientation, min_max=True):
# Deprecated Functions & Classes & Methods:
def tract_sum_generator(folder_path):
"""Sums the tracts of different dMRI files
@@ -477,6 +488,7 @@ def tract_sum_generator(folder_path):
return None
class DataMapperHDF5(data.Dataset):
"""Data Mapper Class.
@@ -508,6 +520,7 @@ class DataMapperHDF5(data.Dataset):
def __len__(self):
return len(self.y)
def get_datasetsHDF5(data_parameters):
"""Data Loader Function.
@@ -543,10 +556,12 @@ def get_datasetsHDF5(data_parameters):
data_parameters['data_directory'], data_parameters['testing_targets']), 'r')
return (
DataMapperHDF5(training_data['data'][()], training_labels['label'][()]),
DataMapperHDF5(training_data['data'][()],
training_labels['label'][()]),
DataMapperHDF5(testing_data['data'][()], testing_labels['label'][()])
)
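
A note on the indexing idiom used here: for an h5py dataset, ds[()] reads the entire dataset eagerly into a NumPy array, so both splits end up fully in memory. A hedged sketch; the file paths are placeholders, while the 'data' and 'label' keys match the ones used above:

```python
import h5py

# Placeholder paths; the real files are built from data_parameters above.
with h5py.File('/path/to/training_data.h5', 'r') as data_file, \
        h5py.File('/path/to/training_labels.h5', 'r') as label_file:
    data = data_file['data'][()]      # [()] loads the full dataset into memory
    labels = label_file['label'][()]

print(data.shape, labels.shape)
```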
if __name__ == "__main__":
folder_location = "../well/win-biobank/projects/imaging/data/data3/subjectsAll/"