From 1963530984c9872af2909c7db5f9b6934558c5c6 Mon Sep 17 00:00:00 2001
From: Yin Li
Date: Mon, 2 Mar 2020 11:31:38 -0500
Subject: [PATCH] Add gradient norms to tensorboard

---
 map2map/train.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/map2map/train.py b/map2map/train.py
index 409b4b4..c919b29 100644
--- a/map2map/train.py
+++ b/map2map/train.py
@@ -348,6 +348,25 @@ def train(epoch, loader, model, criterion, optimizer, scheduler,
                     'real': adv_loss_real.item(),
                 }, global_step=batch)
 
+            # gradients of the weights of the first and the last layer
+            grads = list(p.grad for n, p in model.named_parameters()
+                         if n.endswith('weight'))
+            grads = [grads[0], grads[-1]]
+            grads = [g.detach().norm().item() for g in grads]
+            logger.add_scalars('grad', {
+                'first': grads[0],
+                'last': grads[1],
+            }, global_step=batch)
+            if args.adv and epoch >= args.adv_start:
+                grads = list(p.grad for n, p in adv_model.named_parameters()
+                             if n.endswith('weight'))
+                grads = [grads[0], grads[-1]]
+                grads = [g.detach().norm().item() for g in grads]
+                logger.add_scalars('grad/adv', {
+                    'first': grads[0],
+                    'last': grads[1],
+                }, global_step=batch)
+
     dist.all_reduce(epoch_loss)
     epoch_loss /= len(loader) * world_size
     if rank == 0:
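
Note: the logging pattern in the hunk above can be exercised on its own. The
sketch below is a minimal, self-contained illustration, not part of the patch:
the toy nn.Sequential model and the 'runs/grad_demo' log directory are
assumptions, and logger stands in for what is presumably a
torch.utils.tensorboard.SummaryWriter in map2map. It reproduces the same
selection of the first and last weight gradients and logs their 2-norms.

    import torch
    import torch.nn as nn
    from torch.utils.tensorboard import SummaryWriter

    # toy stand-ins for map2map's model and logger (assumptions, not the
    # real training setup)
    model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 1))
    logger = SummaryWriter('runs/grad_demo')

    for batch in range(10):
        input = torch.randn(4, 8)
        target = torch.randn(4, 1)

        model.zero_grad()
        loss = nn.functional.mse_loss(model(input), target)
        loss.backward()

        # as in the patch: weight gradients of the first and the last
        # layer, reduced to scalar norms for TensorBoard
        grads = list(p.grad for n, p in model.named_parameters()
                     if n.endswith('weight'))
        grads = [grads[0], grads[-1]]
        grads = [g.detach().norm().item() for g in grads]
        logger.add_scalars('grad', {
            'first': grads[0],
            'last': grads[1],
        }, global_step=batch)

    logger.close()

add_scalars groups both norms on a single TensorBoard chart, which makes
vanishing or exploding gradients at either end of the network easy to spot.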