Commit 95309804 authored by Frisinghelli Daniel

Merge branch 'optim' into ai4ebv-public

parents b96d568d 7a50b5ca
@@ -1190,8 +1190,13 @@ class NetworkTrainer(BaseConfig):
             else:
                 loss = self.loss_function(outputs, labels)
-            # compute the gradients of the loss function w.r.t.
-            # the network weights
+            # stop training if loss is NaN
+            if torch.isnan(loss):
+                LOGGER.info('Encountered NaN in loss. Stopping training ...')
+                return self.training_state
+
+            # compute the gradients of the loss function w.r.t. the network
+            # weights
             loss.backward()
             # clip gradients
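For context, here is a minimal, self-contained sketch of the pattern this commit introduces: the loss is checked for NaN before back-propagation, and training is aborted early if a NaN is encountered. The names `self.loss_function`, `LOGGER`, `torch.isnan`, and `self.training_state` follow the diff above; the surrounding `train_epoch` function, its arguments, and the gradient-clipping value are assumptions made for illustration, not the repository's actual implementation.

```python
import logging

import torch

LOGGER = logging.getLogger(__name__)


def train_epoch(model, loss_function, optimizer, dataloader, training_state):
    """Run one training epoch with a NaN guard on the loss (illustrative sketch)."""
    for inputs, labels in dataloader:
        optimizer.zero_grad()
        outputs = model(inputs)
        loss = loss_function(outputs, labels)

        # stop training if loss is NaN: back-propagating a NaN loss would
        # corrupt the weights, so abort and return the state collected so far
        if torch.isnan(loss):
            LOGGER.info('Encountered NaN in loss. Stopping training ...')
            return training_state

        # compute the gradients of the loss function w.r.t. the network weights
        loss.backward()

        # clip gradients (max_norm is an assumed value, not taken from the diff)
        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)

        optimizer.step()

    return training_state
```

Returning the training state rather than raising an exception lets the caller keep whatever metrics were accumulated before the NaN appeared, which mirrors the `return self.training_state` in the diff.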