Commit 186328d1 authored by Andrei-Claudiu Roibu's avatar Andrei-Claudiu Roibu 🖥

wrote the get_datasets function

parent 3742f2ff
@@ -14,9 +14,11 @@ To use content from this folder, import the functions and instantiate them as yo
import os
import h5py
from glob import glob
import numpy as np
import nibabel as nib
import torch.utils.data as data


def dirReader(folder_location):
"""Produces a list of of data-tags which are accessible
@@ -143,7 +145,38 @@ class DataMapper(data.Dataset):
        return len(self.y)


def get_datasets(data_parameters):
"""Data Loader Function.
This function loads the various data file and returns the relevand mapped datasets.
Args:
data_parameters (dict): Dictionary containing relevant information for the datafiles.
data_parameters = {
data_directory: 'path/to/directory'
train_data_file: 'training_data'
train_output_targets: 'training_targets'
test_data_file: 'testing_data'
test_target_file: 'testing_targets'
}
Returns:
touple: the relevant train and test datasets
Raises:
None
"""
    training_data = h5py.File(os.path.join(data_parameters['data_directory'], data_parameters['training_data']), 'r')
    testing_data = h5py.File(os.path.join(data_parameters['data_directory'], data_parameters['testing_data']), 'r')
    training_labels = h5py.File(os.path.join(data_parameters['data_directory'], data_parameters['training_targets']), 'r')
    testing_labels = h5py.File(os.path.join(data_parameters['data_directory'], data_parameters['testing_targets']), 'r')

    # Read the full arrays into memory and wrap them in DataMapper datasets.
    return (
        DataMapper(training_data['data'][()], training_labels['label'][()]),
        DataMapper(testing_data['data'][()], testing_labels['label'][()])
    )
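
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of this commit). The directory and
# HDF5 file names below are hypothetical; each file is assumed to contain a
# 'data' dataset (inputs) or a 'label' dataset (targets), matching the keys
# read by get_datasets above.

def _example_get_datasets_usage():
    data_parameters = {
        'data_directory': 'path/to/directory',      # hypothetical location
        'training_data': 'training_data.h5',        # inputs, read from 'data'
        'training_targets': 'training_targets.h5',  # targets, read from 'label'
        'testing_data': 'testing_data.h5',
        'testing_targets': 'testing_targets.h5'
    }

    train_dataset, test_dataset = get_datasets(data_parameters)

    # Wrap the DataMapper datasets in standard PyTorch loaders for batching.
    train_loader = data.DataLoader(train_dataset, batch_size=32, shuffle=True)
    test_loader = data.DataLoader(test_dataset, batch_size=32, shuffle=False)

    return train_loader, test_loader
# ---------------------------------------------------------------------------
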
if __name__ == "__main__":
......