Commit 4d76c5ff authored by Andrei-Claudiu Roibu

creating save_model_directory if it does not exist

parent 16550d2e
@@ -39,7 +39,7 @@ import torch.utils.data as data
 from solver import Solver
 from BrainMapperUNet import BrainMapperUNet3D
-from utils.data_utils import get_datasets, data_test_train_validation_split, update_shuffling_flag
+from utils.data_utils import get_datasets, data_test_train_validation_split, update_shuffling_flag, create_folder
 import utils.data_evaluation_utils as evaluations
 from utils.data_logging_utils import LogWriter
@@ -137,33 +137,36 @@ def train(data_parameters, training_parameters, network_parameters, misc_paramet
     else:
         BrainMapperModel = BrainMapperUNet3D(network_parameters)

-    # solver = Solver(model=BrainMapperModel,
-    #                 device=misc_parameters['device'],
-    #                 number_of_classes=network_parameters['number_of_classes'],
-    #                 experiment_name=training_parameters['experiment_name'],
-    #                 optimizer_arguments={'lr': training_parameters['learning_rate'],
-    #                                      'betas': training_parameters['optimizer_beta'],
-    #                                      'eps': training_parameters['optimizer_epsilon'],
-    #                                      'weight_decay': training_parameters['optimizer_weigth_decay']
-    #                                      },
-    #                 model_name=misc_parameters['model_name'],
-    #                 number_epochs=training_parameters['number_of_epochs'],
-    #                 loss_log_period=training_parameters['loss_log_period'],
-    #                 learning_rate_scheduler_step_size=training_parameters[
-    #                     'learning_rate_scheduler_step_size'],
-    #                 learning_rate_scheduler_gamma=training_parameters['learning_rate_scheduler_gamma'],
-    #                 use_last_checkpoint=training_parameters['use_last_checkpoint'],
-    #                 experiment_directory=misc_parameters['experiments_directory'],
-    #                 logs_directory=misc_parameters['logs_directory']
-    #                 )
-    # solver.train(train_loader, validation_loader)
-    # model_output_path = os.path.join(
-    #     misc_parameters['save_model_directory'], training_parameters['final_model_output_file'])
-    # BrainMapperModel.save(model_output_path)
-    # print("Final Model Saved in: {}".format(model_output_path))
+    solver = Solver(model=BrainMapperModel,
+                    device=misc_parameters['device'],
+                    number_of_classes=network_parameters['number_of_classes'],
+                    experiment_name=training_parameters['experiment_name'],
+                    optimizer_arguments={'lr': training_parameters['learning_rate'],
+                                         'betas': training_parameters['optimizer_beta'],
+                                         'eps': training_parameters['optimizer_epsilon'],
+                                         'weight_decay': training_parameters['optimizer_weigth_decay']
+                                         },
+                    model_name=misc_parameters['model_name'],
+                    number_epochs=training_parameters['number_of_epochs'],
+                    loss_log_period=training_parameters['loss_log_period'],
+                    learning_rate_scheduler_step_size=training_parameters[
+                        'learning_rate_scheduler_step_size'],
+                    learning_rate_scheduler_gamma=training_parameters['learning_rate_scheduler_gamma'],
+                    use_last_checkpoint=training_parameters['use_last_checkpoint'],
+                    experiment_directory=misc_parameters['experiments_directory'],
+                    logs_directory=misc_parameters['logs_directory']
+                    )
+    solver.train(train_loader, validation_loader)
+    model_output_path = os.path.join(
+        misc_parameters['save_model_directory'], training_parameters['final_model_output_file'])
+    create_folder(misc_parameters['save_model_directory'])
+    BrainMapperModel.save(model_output_path)
+    print("Final Model Saved in: {}".format(model_output_path))

 def evaluate_score(training_parameters, network_parameters, misc_parameters, evaluation_parameters):
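Note: the create_folder helper imported above lives in utils/data_utils.py and its body is not shown in this diff. Per the commit message it presumably just creates save_model_directory when it is missing; a minimal sketch of such a helper (an assumption, not the repository's actual implementation) would be:

import os

def create_folder(directory):
    # Assumed behaviour: create the directory (and any missing parents)
    # if it does not already exist, so the subsequent
    # BrainMapperModel.save(model_output_path) cannot fail with a
    # missing-directory error.
    if not os.path.exists(directory):
        os.makedirs(directory)

Calling it before BrainMapperModel.save() guarantees that the directory component of model_output_path exists at save time.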