Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Andrei-Claudiu Roibu
BrainMapper
Commits
159001d8
Commit
159001d8
authored
Jul 21, 2020
by
Andrei Roibu
Browse files
prepared execution of first VA code + fixed bugs
parent
1515a03b
Changes
2
Hide whitespace changes
Inline
Side-by-side
solver.py
View file @
159001d8
...
...
@@ -62,9 +62,9 @@ class Solver():
experiment_name
,
optimizer
,
optimizer_arguments
=
{},
#
loss_function=MSELoss(),
# loss_function=torch.nn.L1Loss(),
loss_function
=
torch
.
nn
.
CosineEmbeddingLoss
(),
loss_function
=
MSELoss
(),
# loss_function=torch.nn.L1Loss(),
#
loss_function=torch.nn.CosineEmbeddingLoss(),
model_name
=
'BrainMapper'
,
labels
=
None
,
number_epochs
=
10
,
...
...
@@ -208,9 +208,8 @@ class Solver():
y_hat
=
torch
.
mul
(
y_hat
,
MNI152_T1_2mm_brain_mask
)
# loss = self.loss_function(y_hat, y) # Loss computation
loss
=
self
.
loss_function
(
y_hat
+
1e-4
,
y
+
1e-4
,
torch
.
tensor
(
1.0
).
cuda
(
self
.
device
,
non_blocking
=
True
))
loss
=
self
.
loss_function
(
y_hat
,
y
)
# Loss computation
# loss = self.loss_function(y_hat+1e-4, y+1e-4, torch.tensor(1.0).cuda(self.device, non_blocking=True))
# We also calculate a separate MSE for cost function comparison!
MSE
=
self
.
MSE
(
y_hat
,
y
)
...
...
utils/modules.py
View file @
159001d8
...
...
@@ -309,8 +309,8 @@ class ResNetClassifierBlock3D(nn.Module):
self
.
normalization
=
nn
.
InstanceNorm3d
(
num_features
=
parameters
[
'number_of_classes'
])
#
self.activation = nn.Sigmoid()
self
.
activation
=
nn
.
Tanh
()
self
.
activation
=
nn
.
Sigmoid
()
#
self.activation = nn.Tanh()
# TODO: Might be worth looking at GANs for image generation, and adding padding
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment