Commit 92f1b83d authored by Andrei-Claudiu Roibu's avatar Andrei-Claudiu Roibu 🖥
Browse files

Merge branch 'autoencoder' into 'master'

Autoencoder

See merge request !2
parents bd5515dc 159001d8
......@@ -120,4 +120,7 @@ datasets/
files.txt
jobscript.sge.sh
*.nii.gz
stuff/
\ No newline at end of file
stuff/
test/*
.DS_Store
logs/
......@@ -18,6 +18,7 @@ import numpy as np
import torch
import torch.nn as nn
import utils.modules as modules
from torch.nn.init import _calculate_fan_in_and_fan_out as calculate_fan
class BrainMapperAE3D(nn.Module):
......@@ -80,6 +81,10 @@ class BrainMapperAE3D(nn.Module):
self.transformerBlock4 = modules.ResNetBlock3D(parameters)
self.transformerBlock5 = modules.ResNetBlock3D(parameters)
self.transformerBlock6 = modules.ResNetBlock3D(parameters)
self.transformerBlock7 = modules.ResNetBlock3D(parameters)
self.transformerBlock8 = modules.ResNetBlock3D(parameters)
self.transformerBlock9 = modules.ResNetBlock3D(parameters)
self.transformerBlock10 = modules.ResNetBlock3D(parameters)
# Decoder
......@@ -125,6 +130,10 @@ class BrainMapperAE3D(nn.Module):
X = self.transformerBlock4.forward(X)
X = self.transformerBlock5.forward(X)
X = self.transformerBlock6.forward(X)
X = self.transformerBlock7.forward(X)
X = self.transformerBlock8.forward(X)
X = self.transformerBlock9.forward(X)
X = self.transformerBlock10.forward(X)
# Decoder
......@@ -214,5 +223,10 @@ class BrainMapperAE3D(nn.Module):
for _, subsubmodule in submodule.named_children():
if isinstance(subsubmodule, (torch.nn.PReLU, torch.nn.Dropout3d, torch.nn.MaxPool3d)) == False:
subsubmodule.reset_parameters()
# if isinstance(subsubmodule, (torch.nn.Conv3d, torch.nn.ConvTranspose3d)):
# gain = np.sqrt(np.divide(2, 1 + np.power(0.25, 2)))
# fan, _ = calculate_fan(subsubmodule.weight)
# std = np.divide(gain, np.sqrt(fan))
# subsubmodule.weight.data.normal_(0, std)
print("Initialized network parameters!")
\ No newline at end of file
print("Initialized network parameters!")
This diff is collapsed.
# FunctionMapper
# BrainMapper
This project will aim to address one of the big challenges in imaging-neuroscience: that of how a brain’s functional connectivity, represented by resting-state maps, can be predicted from structural connectivity information obtained from dw-MRI.
......
......@@ -40,9 +40,8 @@ import torch.utils.data as data
import numpy as np
from solver import Solver
# from BrainMapperUNet import BrainMapperUNet3D, BrainMapperResUNet3D, BrainMapperResUNet3Dshallow, BrainMapperCompResUNet3D
from BrainMapperAE import BrainMapperAE3D
from utils.data_utils import get_datasets, data_test_train_validation_split, update_shuffling_flag, create_folder
from utils.data_utils import get_datasets, data_preparation, update_shuffling_flag, create_folder
import utils.data_evaluation_utils as evaluations
from utils.data_logging_utils import LogWriter
......@@ -150,16 +149,12 @@ def train(data_parameters, training_parameters, network_parameters, misc_paramet
BrainMapperModel = torch.load(
training_parameters['pre_trained_path'])
else:
# BrainMapperModel = BrainMapperUNet3D(network_parameters)
# BrainMapperModel = BrainMapperResUNet3D(network_parameters)
# BrainMapperModel = BrainMapperResUNet3Dshallow(network_parameters)
# BrainMapperModel = BrainMapperCompResUNet3D(network_parameters)
BrainMapperModel = BrainMapperAE3D(network_parameters)
BrainMapperModel.reset_parameters()
optimizer = torch.optim.Adam
# optimizer = torch.optim.AdamW
solver = Solver(model=BrainMapperModel,
device=misc_parameters['device'],
......@@ -259,28 +254,27 @@ def evaluate_score(training_parameters, network_parameters, misc_parameters, eva
# TODO - NEED TO UPDATE THE DATA FUNCTIONS!
logWriter = LogWriter(number_of_classes=network_parameters['number_of_classes'],
logs_directory=misc_parameters['logs_directory'],
experiment_name=training_parameters['experiment_name']
)
prediction_output_path = os.path.join(misc_parameters['experiments_directory'],
training_parameters['experiment_name'],
evaluation_parameters['saved_predictions_directory']
)
_ = evaluations.evaluate_dice_score(trained_model_path=evaluation_parameters['trained_model_path'],
number_of_classes=network_parameters['number_of_classes'],
data_directory=evaluation_parameters['data_directory'],
targets_directory=evaluation_parameters['targets_directory'],
data_list=evaluation_parameters['data_list'],
orientation=evaluation_parameters['orientation'],
prediction_output_path=prediction_output_path,
device=misc_parameters['device'],
LogWriter=logWriter
)
logWriter.close()
evaluations.evaluate_correlation(trained_model_path=evaluation_parameters['trained_model_path'],
data_directory=evaluation_parameters['data_directory'],
mapping_data_file=mapping_evaluation_parameters['mapping_data_file'],
target_data_file=evaluation_parameters['targets_directory'],
data_list=evaluation_parameters['data_list'],
prediction_output_path=prediction_output_path,
brain_mask_path=mapping_evaluation_parameters['brain_mask_path'],
rsfmri_mean_mask_path=mapping_evaluation_parameters[
'rsfmri_mean_mask_path'],
dmri_mean_mask_path=mapping_evaluation_parameters[
'dmri_mean_mask_path'],
mean_regression=mapping_evaluation_parameters['mean_regression'],
scaling_factors=mapping_evaluation_parameters['scaling_factors'],
regression_factors=mapping_evaluation_parameters['regression_factors'],
device=misc_parameters['device'],
)
def evaluate_mapping(mapping_evaluation_parameters):
......@@ -306,12 +300,15 @@ def evaluate_mapping(mapping_evaluation_parameters):
mapping_data_file = mapping_evaluation_parameters['mapping_data_file']
data_list = mapping_evaluation_parameters['data_list']
prediction_output_path = mapping_evaluation_parameters['prediction_output_path']
dmri_mean_mask_path = mapping_evaluation_parameters['dmri_mean_mask_path']
rsfmri_mean_mask_path = mapping_evaluation_parameters['rsfmri_mean_mask_path']
device = mapping_evaluation_parameters['device']
exit_on_error = mapping_evaluation_parameters['exit_on_error']
brain_mask_path = mapping_evaluation_parameters['brain_mask_path']
mean_mask_path = mapping_evaluation_parameters['mean_mask_path']
mean_reduction = mapping_evaluation_parameters['mean_reduction']
mean_regression = mapping_evaluation_parameters['mean_regression']
mean_subtraction = mapping_evaluation_parameters['mean_subtraction']
scaling_factors = mapping_evaluation_parameters['scaling_factors']
regression_factors = mapping_evaluation_parameters['regression_factors']
evaluations.evaluate_mapping(trained_model_path,
data_directory,
......@@ -319,9 +316,12 @@ def evaluate_mapping(mapping_evaluation_parameters):
data_list,
prediction_output_path,
brain_mask_path,
mean_mask_path,
mean_reduction,
dmri_mean_mask_path,
rsfmri_mean_mask_path,
mean_regression,
mean_subtraction,
scaling_factors,
regression_factors,
device=device,
exit_on_error=exit_on_error)
......@@ -366,31 +366,31 @@ if __name__ == '__main__':
# Here we shuffle the data!
if data_parameters['data_split_flag'] == True:
print('Data is shuffling... This could take a few minutes!')
if data_parameters['data_split_flag'] == True:
if data_parameters['use_data_file'] == True:
data_test_train_validation_split(data_parameters['data_folder_name'],
data_parameters['test_percentage'],
data_parameters['subject_number'],
data_directory=data_parameters['data_directory'],
train_inputs=data_parameters['train_data_file'],
train_targets=data_parameters['train_output_targets'],
mean_mask_path=data_parameters['mean_mask_path'],
data_file=data_parameters['data_file'],
K_fold=data_parameters['k_fold']
)
data_preparation(data_parameters['data_folder_name'],
data_parameters['test_percentage'],
data_parameters['subject_number'],
data_directory=data_parameters['data_directory'],
train_inputs=data_parameters['train_data_file'],
train_targets=data_parameters['train_output_targets'],
rsfMRI_mean_mask_path=data_parameters['rsfmri_mean_mask_path'],
dMRI_mean_mask_path=data_parameters['dmri_mean_mask_path'],
data_file=data_parameters['data_file'],
K_fold=data_parameters['k_fold']
)
else:
data_test_train_validation_split(data_parameters['data_folder_name'],
data_parameters['test_percentage'],
data_parameters['subject_number'],
data_directory=data_parameters['data_directory'],
train_inputs=data_parameters['train_data_file'],
train_targets=data_parameters['train_output_targets'],
mean_mask_path=data_parameters['mean_mask_path'],
K_fold=data_parameters['k_fold']
)
data_preparation(data_parameters['data_folder_name'],
data_parameters['test_percentage'],
data_parameters['subject_number'],
data_directory=data_parameters['data_directory'],
train_inputs=data_parameters['train_data_file'],
train_targets=data_parameters['train_output_targets'],
rsfMRI_mean_mask_path=data_parameters['rsfmri_mean_mask_path'],
dMRI_mean_mask_path=data_parameters['dmri_mean_mask_path'],
K_fold=data_parameters['k_fold']
)
update_shuffling_flag('settings.ini')
print('Data is shuffling... Complete!')
......@@ -400,7 +400,6 @@ if __name__ == '__main__':
network_parameters, misc_parameters)
# NOTE: THE EVAL FUNCTIONS HAVE NOT YET BEEN DEBUGGED (16/04/20)
# NOTE: THE EVAL-MAPPING FUNCTION HAS BEEN DEBUGGED (28/04/20)
elif arguments.mode == 'evaluate-score':
evaluate_score(training_parameters,
......@@ -433,6 +432,9 @@ if __name__ == '__main__':
network_parameters, misc_parameters)
logging.basicConfig(filename='evaluate-mapping-error.log')
evaluate_mapping(mapping_evaluation_parameters)
elif arguments.mode == 'prepare-data':
print('Ensure you have updated the settings.ini file accordingly! This call does nothing but pass after data was shuffled!')
pass
else:
raise ValueError(
'Invalid mode value! Only supports: train, evaluate-score, evaluate-mapping, train-and-evaluate-mapping, clear-experiments and clear-everything')
'Invalid mode value! Only supports: train, evaluate-score, evaluate-mapping, train-and-evaluate-mapping, prepare-data, clear-experiments and clear-everything')
......@@ -6,31 +6,34 @@ data_file = "/well/win-biobank/projects/imaging/data/data3/subjectsAll/subj_22k.
k_fold = None
data_split_flag = False
test_percentage = 5
subject_number = 600
subject_number = 12000
train_list = "datasets/train.txt"
validation_list = "datasets/validation.txt"
test_list = "datasets/test.txt"
scaling_factors = "datasets/scaling_factors.pkl"
regression_weights = "datasets/regression_weights.pkl"
train_data_file = "dMRI/autoptx_preproc/tractsNormSummed.nii.gz"
train_output_targets = "fMRI/rfMRI_25.dr/dr_stage2.nii.gz"
validation_data_file = "dMRI/autoptx_preproc/tractsNormSummed.nii.gz"
validation_target_file = "fMRI/rfMRI_25.dr/dr_stage2.nii.gz"
brain_mask_path = "utils/MNI152_T1_2mm_brain_mask.nii.gz"
mean_mask_path = "utils/mean_dr_stage2.nii.gz"
mean_reduction = True
rsfmri_mean_mask_path = "utils/mean_dr_stage2.nii.gz"
dmri_mean_mask_path = "utils/mean_tractsNormSummed_downsampled.nii.gz"
mean_regression = False
mean_subtraction = True
[TRAINING]
experiment_name = "CU3D17-3"
pre_trained_path = "saved_models/CU3D17-3.pth.tar"
final_model_output_file = "CU3D17-3.pth.tar"
training_batch_size = 3
validation_batch_size = 3
experiment_name = "VA2-1"
pre_trained_path = "saved_models/VA2-1.pth.tar"
final_model_output_file = "VA2-1.pth.tar"
training_batch_size = 5
validation_batch_size = 5
use_pre_trained = False
learning_rate = 1e-1
learning_rate = 1e-5
optimizer_beta = (0.9, 0.999)
optimizer_epsilon = 1e-8
optimizer_weigth_decay = 1e-5
number_of_epochs = 200
number_of_epochs = 10
loss_log_period = 50
learning_rate_scheduler_step_size = 5
learning_rate_scheduler_gamma = 1e-1
......@@ -40,19 +43,15 @@ use_last_checkpoint = False
kernel_heigth = 3
kernel_width = 3
kernel_depth = 3
; kernel_classification = 1
kernel_classification = 7
input_channels = 1
output_channels = 64
convolution_stride = 1
dropout = 0.2
; pool_kernel_size = 2
dropout = 0
pool_kernel_size = 3
pool_stride = 2
up_mode = "upconv"
number_of_classes = 1
; ---> parameters for the ResNet CGAN
pool_kernel_size = 3
kernel_classification = 7
[MISC]
save_model_directory = "saved_models"
......
[MAPPING]
trained_model_path = "saved_models/test24.pth.tar"
prediction_output_path = "test24_predictions"
trained_model_path = "saved_models/VA2.pth.tar"
prediction_output_path = "VA2_predictions"
data_directory = "/well/win-biobank/projects/imaging/data/data3/subjectsAll/"
mapping_data_file = "dMRI/autoptx_preproc/tractsNormSummed.nii.gz"
data_list = "datasets/test.txt"
brain_mask_path = "utils/MNI152_T1_2mm_brain_mask.nii.gz"
rsfmri_mean_mask_path = "utils/mean_dr_stage2.nii.gz"
dmri_mean_mask_path = "utils/mean_tractsNormSummed_downsampled.nii.gz"
mean_mask_path = "utils/mean_dr_stage2.nii.gz"
scaling_factors = "datasets/scaling_factors.pkl"
mean_reduction = True
regression_factors = "datasets/regression_weights.pkl"
mean_regression = False
mean_subtraction = True
device = 0
exit_on_error = True
......@@ -10,11 +10,9 @@ setup(
install_requires=[
'pip',
'matplotlib',
'nibabel',
'numpy',
'pandas',
'torch==1.4',
'h5py',
'fslpy',
'tensorboardX',
'sklearn',
......
......@@ -63,6 +63,8 @@ class Solver():
optimizer,
optimizer_arguments={},
loss_function=MSELoss(),
# loss_function=torch.nn.L1Loss(),
# loss_function=torch.nn.CosineEmbeddingLoss(),
model_name='BrainMapper',
labels=None,
number_epochs=10,
......@@ -83,8 +85,10 @@ class Solver():
if torch.cuda.is_available():
self.loss_function = loss_function.cuda(device)
self.MSE = MSELoss().cuda(device)
else:
self.loss_function = loss_function
self.MSE = MSELoss()
self.model_name = model_name
self.labels = labels
......@@ -152,6 +156,7 @@ class Solver():
previous_checkpoint = None
previous_loss = None
previous_MSE = None
print('****************************************************************')
print('TRAINING IS STARTING!')
......@@ -175,6 +180,7 @@ class Solver():
print('-> Phase: {}'.format(phase))
losses = []
MSEs = []
if phase == 'train':
model.train()
......@@ -189,10 +195,8 @@ class Solver():
X = torch.unsqueeze(X, dim=1)
y = torch.unsqueeze(y, dim=1)
print('X range:', torch.min(X), torch.max(X))
print('y range:', torch.min(y), torch.max(y))
MNI152_T1_2mm_brain_mask = torch.unsqueeze(torch.unsqueeze(self.MNI152_T1_2mm_brain_mask, dim=0), dim=0)
MNI152_T1_2mm_brain_mask = torch.unsqueeze(
torch.unsqueeze(self.MNI152_T1_2mm_brain_mask, dim=0), dim=0)
if model.test_if_cuda:
X = X.cuda(self.device, non_blocking=True)
......@@ -202,13 +206,14 @@ class Solver():
y_hat = model(X) # Forward pass & Masking
print('y_hat range:', torch.min(y_hat), torch.max(y_hat))
y_hat = torch.mul(y_hat, MNI152_T1_2mm_brain_mask)
print('y_hat masked range:', torch.min(y_hat), torch.max(y_hat))
loss = self.loss_function(y_hat, y) # Loss computation
# loss = self.loss_function(y_hat+1e-4, y+1e-4, torch.tensor(1.0).cuda(self.device, non_blocking=True))
# We also calculate a separate MSE for cost function comparison!
MSE = self.MSE(y_hat, y)
MSEs.append(MSE.item())
if phase == 'train':
optimizer.zero_grad() # Zero the parameter gradients
......@@ -226,7 +231,7 @@ class Solver():
# Clear the memory
del X, y, y_hat, loss, MNI152_T1_2mm_brain_mask
del X, y, y_hat, loss, MNI152_T1_2mm_brain_mask, MSE
torch.cuda.empty_cache()
if phase == 'validation':
......@@ -240,9 +245,14 @@ class Solver():
if phase == 'train':
self.LogWriter.loss_per_epoch(losses, phase, epoch)
self.LogWriter.MSE_per_epoch(MSEs, phase, epoch)
elif phase == 'validation':
self.LogWriter.loss_per_epoch(losses, phase, epoch, previous_loss=previous_loss)
self.LogWriter.loss_per_epoch(
losses, phase, epoch, previous_loss=previous_loss)
previous_loss = np.mean(losses)
self.LogWriter.MSE_per_epoch(
MSEs, phase, epoch, previous_loss=previous_MSE)
previous_MSE = np.mean(MSEs)
if phase == 'validation':
early_stop, save_checkpoint = self.EarlyStopping(
......@@ -300,6 +310,9 @@ class Solver():
print('Final Model Saved in: {}'.format(model_output_path))
print('****************************************************************')
if self.start_epoch >= self.number_epochs+1:
validation_loss = None
return validation_loss
def save_checkpoint(self, state, filename):
......
"""Biobank Data Stats Calculator
Description:
This file contains the relevant scripts for producing a database containing relevant statistics about the imaging data from the UK Biobank.
This is a standalone script, intended to be used only once during the project. Hence, it is not integrated into the larger utils packages.
Usage:
To use content from this folder, import the functions and instantiate them as you wish to use them:
from utils.DSbiobank import function_name
"""
import numpy as np
from fsl.data.image import Image
from fsl.utils.image.resample import resampleToPixdims
import matplotlib.pyplot as plt
from data_utils import directory_reader, regression_weight_calculator
from tempfile import TemporaryFile
from datetime import datetime
import pandas as pd
import os
def stats_calc(array):
    """Statistics calculator.

    Compute summary statistics (min, max, mean, median, std) plus a fixed
    set of percentiles for a subject data array. Statistics are taken over
    the flattened array, so any input shape works.

    Args:
        array (np.array): 3D array of subject data.

    Returns:
        tuple: 35 floats, in this exact order (unchanged from the original
        implementation, and matched by the column labels in
        ``database_generator``):
        (min, max, mean, median, std,
         1p, 25p, 75p, 99p,
         0.1p, 0.2p, 0.3p, 0.4p, 0.5p, 0.6p, 0.7p, 0.8p, 0.9p,
         99.1p, 99.2p, 99.3p, 99.4p, 99.5p, 99.6p, 99.7p, 99.8p, 99.9p,
         2p, 3p, 4p, 5p, 95p, 96p, 97p, 98p)
    """
    # Percentile ranks, in the exact order the callers unpack them.
    percentile_ranks = [
        1, 25, 75, 99,
        0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
        99.1, 99.2, 99.3, 99.4, 99.5, 99.6, 99.7, 99.8, 99.9,
        2, 3, 4, 5,
        95, 96, 97, 98,
    ]
    # np.percentile accepts a sequence of ranks, so one vectorised call
    # (a single sort of the data) replaces 30 separate invocations.
    percentiles = np.percentile(array, percentile_ranks)
    return (np.min(array), np.max(array), np.mean(array),
            np.median(array), np.std(array), *percentiles)
def database_generator(data_directory, train_inputs, train_targets, rsfMRI_mean_mask_path, dMRI_mean_mask_path):
    """Build per-subject statistics tables for the dMRI and rsfMRI volumes.

    For every subject folder found under ``data_directory`` this loads the
    dMRI input volume and the rsfMRI target volume, subtracts each subject's
    regression-weighted mean volume, computes the statistics from
    ``stats_calc``, and finally writes two pandas DataFrames to
    ``utils/dmri_stats.pkl`` and ``utils/rsfmri_stats.pkl``.

    Args:
        data_directory (str): Root directory containing one folder per subject.
        train_inputs (str): Path, relative to each subject folder, of the dMRI volume.
        train_targets (str): Path, relative to each subject folder, of the rsfMRI volume.
        rsfMRI_mean_mask_path (str): Path to the rsfMRI mean mask image.
        dMRI_mean_mask_path (str): Path to the dMRI mean mask image.
    """
    subDirectoryList, _ = directory_reader(folder_location=os.path.join(
        os.path.expanduser("~"), data_directory), subject_number=None, write_txt=False)
    dmri_imaging_dictionary = {}
    rsfmri_imaging_dictionary = {}
    # Column labels: the regression weight followed by the 35 outputs of
    # stats_calc, in the same order that function returns them.
    dictionary_labels = ['w_reg', 'min', 'max', 'mean', 'med', 'std', '1p', '25p', '75p', '99p',
                         '0.1p', '0.2p', '0.3p', '0.4p', '0.5p', '0.6p', '0.7p', '0.8p', '0.9p',
                         '99.1p', '99.2p', '99.3p', '99.4p', '99.5p', '99.6p', '99.7p', '99.8p', '99.9p',
                         '2p', '3p', '4p', '5p', '95p', '96p', '97p', '98p']
    dmri_mean_volume = Image(dMRI_mean_mask_path).data
    # Only the first rsfMRI component map is used — presumably to match the
    # training targets; TODO confirm against the training pipeline.
    rsfmri_mean_volume = Image(rsfMRI_mean_mask_path).data[:, :, :, 0]
    index = 0  # NOTE(review): incremented but never read — progress counter left over?
    for subject in subDirectoryList:
        index += 1
        w_dMRI, w_rsfMRI = regression_weight_calculator(
            data_directory, subject, train_inputs, train_targets, rsfMRI_mean_mask_path, dMRI_mean_mask_path)
        # ------------------ dMRI statistics
        # NOTE(review): stray text resembling a leaked credential was removed
        # from the divider comment above.
        dmri_path = os.path.join(os.path.expanduser(
            "~"), data_directory, subject, train_inputs)
        # Resample to 2 mm pixdims so the subject volume matches the mean-mask
        # resolution — assumed; confirm against the preprocessing pipeline.
        dmri_volume, _ = resampleToPixdims(Image(dmri_path), (2, 2, 2))
        dmri_volume = np.subtract(
            dmri_volume, np.multiply(w_dMRI, dmri_mean_volume))
        min_val, max_val, mean_val, med_val, std_val, perc1, perc25, perc75, perc99, perc0_1, perc0_2, perc0_3, perc0_4, perc0_5, perc0_6, perc0_7, perc0_8, perc0_9, perc99_1, perc99_2, perc99_3, perc99_4, perc99_5, perc99_6, perc99_7, perc99_8, perc99_9, perc2, perc3, perc4, perc5, perc95, perc96, perc97, perc98 = stats_calc(
            dmri_volume)
        dmri_imaging_dictionary[subject] = [w_dMRI, min_val, max_val, mean_val, med_val, std_val, perc1, perc25, perc75, perc99, perc0_1, perc0_2, perc0_3, perc0_4, perc0_5, perc0_6,
                                            perc0_7, perc0_8, perc0_9, perc99_1, perc99_2, perc99_3, perc99_4, perc99_5, perc99_6, perc99_7, perc99_8, perc99_9, perc2, perc3, perc4, perc5, perc95, perc96, perc97, perc98]
        # Explicitly release the large volume and stats before loading the
        # next one, to keep peak memory down over thousands of subjects.
        del dmri_path, dmri_volume, w_dMRI, min_val, max_val, mean_val, med_val, std_val, perc1, perc25, perc75, perc99, perc0_1, perc0_2, perc0_3, perc0_4, perc0_5, perc0_6, perc0_7, perc0_8, perc0_9, perc99_1, perc99_2, perc99_3, perc99_4, perc99_5, perc99_6, perc99_7, perc99_8, perc99_9, perc2, perc3, perc4, perc5, perc95, perc96, perc97, perc98
        # ------------------ rsfMRI statistics
        rsfmri_path = os.path.join(os.path.expanduser(
            "~"), data_directory, subject, train_targets)
        rsfmri_volume = Image(rsfmri_path).data[:, :, :, 0]
        rsfmri_volume = np.subtract(
            rsfmri_volume, np.multiply(w_rsfMRI, rsfmri_mean_volume))
        min_val, max_val, mean_val, med_val, std_val, perc1, perc25, perc75, perc99, perc0_1, perc0_2, perc0_3, perc0_4, perc0_5, perc0_6, perc0_7, perc0_8, perc0_9, perc99_1, perc99_2, perc99_3, perc99_4, perc99_5, perc99_6, perc99_7, perc99_8, perc99_9, perc2, perc3, perc4, perc5, perc95, perc96, perc97, perc98 = stats_calc(
            rsfmri_volume)
        rsfmri_imaging_dictionary[subject] = [w_rsfMRI, min_val, max_val, mean_val, med_val, std_val, perc1, perc25, perc75, perc99, perc0_1, perc0_2, perc0_3, perc0_4, perc0_5, perc0_6,
                                              perc0_7, perc0_8, perc0_9, perc99_1, perc99_2, perc99_3, perc99_4, perc99_5, perc99_6, perc99_7, perc99_8, perc99_9, perc2, perc3, perc4, perc5, perc95, perc96, perc97, perc98]
        del rsfmri_path, rsfmri_volume, w_rsfMRI, min_val, max_val, mean_val, med_val, std_val, perc1, perc25, perc75, perc99, perc0_1, perc0_2, perc0_3, perc0_4, perc0_5, perc0_6, perc0_7, perc0_8, perc0_9, perc99_1, perc99_2, perc99_3, perc99_4, perc99_5, perc99_6, perc99_7, perc99_8, perc99_9, perc2, perc3, perc4, perc5, perc95, perc96, perc97, perc98
    # ------------------ persist both tables, one row per subject
    dmri_imaging_df = pd.DataFrame.from_dict(
        dmri_imaging_dictionary, orient="index", columns=dictionary_labels)
    dmri_imaging_df.to_pickle('utils/dmri_stats.pkl')
    rsfmri_imaging_df = pd.DataFrame.from_dict(
        rsfmri_imaging_dictionary, orient="index", columns=dictionary_labels)
    rsfmri_imaging_df.to_pickle('utils/rsfmri_stats.pkl')
if __name__ == '__main__':
    print('---> Start!')
    # One-off run: every path is hard-coded for the Biobank cluster layout.
    rsfmri_mask = "utils/mean_dr_stage2.nii.gz"
    dmri_mask = "utils/mean_tractsNormSummed_downsampled.nii.gz"
    subject_list = "/well/win-biobank/projects/imaging/data/data3/subjectsAll/subj_22k.txt"
    biobank_root = "/well/win-biobank/projects/imaging/data/data3/subjectsAll/"
    dmri_relative_path = "dMRI/autoptx_preproc/tractsNormSummed.nii.gz"
    rsfmri_relative_path = "fMRI/rfMRI_25.dr/dr_stage2.nii.gz"
    database_generator(biobank_root, dmri_relative_path,
                       rsfmri_relative_path, rsfmri_mask, dmri_mask)
    print('---> Finished!')
This diff is collapsed.
......@@ -72,10 +72,7 @@ class LogWriter():
self.current_iteration = 1
if labels is not None:
self.labels = self.labels_generator(labels)
else:
self.labels = ['rsfMRI']
self.labels = ['rsfMRI']
self.logger = logging.getLogger()
file_handler = logging.FileHandler(
......@@ -135,133 +132,51 @@ class LogWriter():
self.log_writer[phase].add_scalar('loss/epoch', loss, epoch)
def close(self):