From f9c327d2a21a2e9c87448bef82f32d7ad8f2830d Mon Sep 17 00:00:00 2001
From: Iridoudou <2534936416@qq.com>
Date: Wed, 11 Aug 2021 19:36:41 +0800
Subject: [PATCH] update code

---
 fit/tools/train.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/fit/tools/train.py b/fit/tools/train.py
index 7977adc..954b08f 100644
--- a/fit/tools/train.py
+++ b/fit/tools/train.py
@@ -72,8 +72,8 @@ def train(smpl_layer, target,
     scale = params["scale"]
     early_stop = Early_Stop()
 
-    # for epoch in tqdm(range(cfg.TRAIN.MAX_EPOCH)):
-    for epoch in range(cfg.TRAIN.MAX_EPOCH):
+    for epoch in tqdm(range(cfg.TRAIN.MAX_EPOCH)):
+    # for epoch in range(cfg.TRAIN.MAX_EPOCH):
         verts, Jtr = smpl_layer(pose_params, th_betas=shape_params)
         loss = F.smooth_l1_loss(Jtr.index_select(1, index["smpl_index"]) * 100 * scale,
                                 target.index_select(1, index["dataset_index"]) * 100)
@@ -89,8 +89,8 @@ def train(smpl_layer, target,
             break
 
         if epoch % cfg.TRAIN.WRITE == 0:
-            logger.info("Epoch {}, lossPerBatch={:.6f}, scale={:.4f} EarlyStopSatis: {}".format(
-                epoch, float(loss),float(scale), early_stop.satis_num))
+            # logger.info("Epoch {}, lossPerBatch={:.6f}, scale={:.4f} EarlyStopSatis: {}".format(
+            #     epoch, float(loss),float(scale), early_stop.satis_num))
             writer.add_scalar('loss', float(loss), epoch)
             writer.add_scalar('learning_rate', float(
                 optimizer.state_dict()['param_groups'][0]['lr']), epoch)
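
Note (not part of the applied patch): the change swaps per-epoch console logging for a tqdm progress bar. logger.info lines printed between tqdm refreshes corrupt the bar's rendering, which is presumably why the logging is commented out rather than kept. Below is a minimal, self-contained sketch of the resulting logging behaviour. It is not the repository's code: the linear model, inputs, MAX_EPOCH, and WRITE are illustrative stand-ins for smpl_layer, the joint targets, cfg.TRAIN.MAX_EPOCH, and cfg.TRAIN.WRITE.

    import torch
    import torch.nn.functional as F
    from torch.utils.tensorboard import SummaryWriter
    from tqdm import tqdm

    MAX_EPOCH = 100  # stand-in for cfg.TRAIN.MAX_EPOCH
    WRITE = 10       # stand-in for cfg.TRAIN.WRITE

    model = torch.nn.Linear(3, 3)   # stand-in for smpl_layer
    inputs = torch.randn(8, 3)      # stand-in for pose/shape params
    target = torch.randn(8, 3)      # stand-in for the joint targets
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-2)
    writer = SummaryWriter()

    # tqdm wraps the epoch range, so progress renders on stderr; the old
    # per-epoch logger.info call is gone, matching the patched loop.
    for epoch in tqdm(range(MAX_EPOCH)):
        loss = F.smooth_l1_loss(model(inputs), target)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        if epoch % WRITE == 0:
            # TensorBoard scalars are still written every WRITE epochs.
            writer.add_scalar('loss', float(loss), epoch)
            writer.add_scalar('learning_rate',
                              optimizer.state_dict()['param_groups'][0]['lr'],
                              epoch)

    writer.close()

If console messages must coexist with the bar, tqdm.write() prints above it without breaking the display, so the commented-out logger call could instead be routed through it.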