Commit f5c757b6 authored by Andrei-Claudiu Roibu's avatar Andrei-Claudiu Roibu 🖥
Browse files

Training loss per epoch is now the last iteration's loss rather than the mean; for other phases, the absolute change from the previous epoch's loss is also printed.

parent a817b485
......@@ -109,7 +109,7 @@ class LogWriter():
self.log_writer['train'].add_scalar(
'loss/iteration', loss_per_iteration, iteration)
def loss_per_epoch(self, losses, phase, epoch):
def loss_per_epoch(self, losses, phase, epoch, previous_loss=None):
"""Log function
This function records the loss for every epoch.
......@@ -118,14 +118,21 @@ class LogWriter():
losses (list): Values of all the losses recorded during the training epoch
phase (str): Current run mode or phase
epoch (int): Current epoch value
previous_loss(float): Value of the previous epoch's loss
"""
loss = np.mean(losses)
if phase == 'train':
loss = losses[-1]
# loss = losses[-1]
print("Loss for Epoch {} of {} is: {}".format(epoch, phase, loss))
else:
loss = np.mean(losses)
# loss = np.mean(losses)
if previous_loss == None:
print("Loss for Epoch {} of {} is: {}".format(epoch, phase, loss))
else:
print("Loss for Epoch {} of {} is {} and Absolute Change is {}".format(epoch, phase, loss, previous_loss - loss))
print("Loss for Epoch {} of {} is: {}".format(epoch, phase, loss))
self.log_writer[phase].add_scalar('loss/epoch', loss, epoch)
def close(self):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment