update lr of CMU Mocap

Iridoudou
2021-08-11 16:05:12 +08:00
parent 2963a13933
commit d1c497bbbc
2 changed files with 5 additions and 5 deletions


@@ -3,7 +3,7 @@
         "GENDER": "neutral"
     },
     "TRAIN": {
-        "LEARNING_RATE": 2e-2,
+        "LEARNING_RATE": 1e-1,
         "MAX_EPOCH": 1000,
         "WRITE": 1,
         "OPTIMIZE_SCALE":1,

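The hunk above raises the learning rate of the CMU Mocap fitting config from 2e-2 to 1e-1. How the JSON value reaches the optimizer is not part of this diff; below is a minimal sketch, assuming the config is loaded into the cfg object the training script uses and that the optimizer is torch.optim.Adam (the path, the EasyDict wrapper, and the optimizer class are all assumptions, not taken from the repository).

    import json
    import torch
    from easydict import EasyDict

    # Hypothetical loading step: only the "TRAIN" keys are visible in this
    # diff; the file path and EasyDict wrapper are assumptions.
    with open("cfg/CMU_Mocap.json") as f:   # hypothetical path
        cfg = EasyDict(json.load(f))

    pose_params = torch.zeros(1, 72, requires_grad=True)   # illustrative SMPL pose
    # Adam is an assumption; the optimizer class does not appear in the diff.
    optimizer = torch.optim.Adam([pose_params], lr=cfg.TRAIN.LEARNING_RATE)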

@@ -72,8 +72,8 @@ def train(smpl_layer, target,
     scale = params["scale"]
     early_stop = Early_Stop()
-    for epoch in tqdm(range(cfg.TRAIN.MAX_EPOCH)):
-    # for epoch in range(cfg.TRAIN.MAX_EPOCH):
+    # for epoch in tqdm(range(cfg.TRAIN.MAX_EPOCH)):
+    for epoch in range(cfg.TRAIN.MAX_EPOCH):
         verts, Jtr = smpl_layer(pose_params, th_betas=shape_params)
         loss = F.smooth_l1_loss(Jtr.index_select(1, index["smpl_index"]) * 100 * scale,
                                 target.index_select(1, index["dataset_index"]) * 100)
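The loss in this hunk aligns a subset of SMPL joints (Jtr) with the corresponding CMU Mocap joints: both sides are multiplied by 100 (presumably a unit rescaling) and the SMPL side carries a learnable global scale. A self-contained sketch of the same computation, with made-up shapes and correspondence tables:

    import torch
    import torch.nn.functional as F

    # Standalone version of the loss above; shapes and index tables are
    # invented for illustration.
    Jtr = torch.rand(1, 24, 3)                  # SMPL joints: (batch, joint, xyz)
    target = torch.rand(1, 31, 3)               # mocap joints
    smpl_index = torch.tensor([1, 2, 4, 5])     # hypothetical matching joints
    dataset_index = torch.tensor([2, 7, 3, 8])
    scale = torch.ones(1, requires_grad=True)   # learnable global scale

    loss = F.smooth_l1_loss(Jtr.index_select(1, smpl_index) * 100 * scale,
                            target.index_select(1, dataset_index) * 100)
    loss.backward()   # here gradients reach scale; in the real loop they also
                      # reach the pose and shape parameters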
@@ -89,8 +89,8 @@ def train(smpl_layer, target,
             break
         if epoch % cfg.TRAIN.WRITE == 0:
-            # logger.info("Epoch {}, lossPerBatch={:.6f}, scale={:.4f} EarlyStopSatis: {}".format(
-            #     epoch, float(loss),float(scale), early_stop.satis_num))
+            logger.info("Epoch {}, lossPerBatch={:.6f}, scale={:.4f} EarlyStopSatis: {}".format(
+                epoch, float(loss),float(scale), early_stop.satis_num))
             writer.add_scalar('loss', float(loss), epoch)
             writer.add_scalar('learning_rate', float(
                 optimizer.state_dict()['param_groups'][0]['lr']), epoch)
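Early_Stop and its satis_num counter are referenced but not defined in this diff. A plausible minimal sketch, assuming a patience counter over non-improving epochs; the repository's real implementation may differ:

    class Early_Stop:
        """Hypothetical reconstruction; not taken from the repository."""
        def __init__(self, patience=30, min_delta=1e-4):
            self.patience = patience
            self.min_delta = min_delta
            self.best = float("inf")
            self.satis_num = 0   # consecutive epochs without real improvement

        def update(self, loss):
            if self.best - loss < self.min_delta:
                self.satis_num += 1   # stop criterion satisfied this epoch
            else:
                self.satis_num = 0
            self.best = min(self.best, loss)
            return self.satis_num >= self.patience   # True -> caller breaks

    # inside the epoch loop: if early_stop.update(float(loss)): break

As an aside, optimizer.param_groups[0]['lr'] reads the same learning rate as optimizer.state_dict()['param_groups'][0]['lr'] without building the full state dict.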