Modify the loss calculation logic under gradient accumulation

Duzeyao
2019-10-25 23:49:27 +08:00
parent c7c3f0349e
commit ca5eef1033
2 changed files with 2 additions and 2 deletions


@@ -221,7 +221,7 @@ def main():
 step + 1,
 piece_num,
 epoch + 1,
-running_loss * gradient_accumulation / log_step))
+running_loss / log_step))
 running_loss = 0
 piece_num += 1


@@ -197,7 +197,7 @@ def main():
 (step + 1) // gradient_accumulation,
 piece_num,
 epoch + 1,
-running_loss * gradient_accumulation / log_step))
+running_loss / log_step))
 running_loss = 0
 piece_num += 1
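
For context, here is a minimal sketch of the kind of gradient-accumulation loop this change targets, assuming the usual PyTorch pattern. Only the names shared with the diff (gradient_accumulation, log_step, running_loss, step) come from the repository; train_epoch and the model/optimizer interface are hypothetical stand-ins, not the project's actual code.

def train_epoch(model, dataloader, optimizer, gradient_accumulation, log_step):
    # Hypothetical PyTorch-style loop illustrating the logging arithmetic.
    running_loss = 0.0
    for step, batch in enumerate(dataloader):
        loss = model(batch)                    # assume the model returns a scalar loss
        loss = loss / gradient_accumulation    # scale so accumulated gradients average out
        loss.backward()
        running_loss += loss.item()            # accumulates the *scaled* loss

        if (step + 1) % gradient_accumulation == 0:
            optimizer.step()                   # one optimizer step per accumulation window
            optimizer.zero_grad()

        if (step + 1) % (log_step * gradient_accumulation) == 0:
            # running_loss now holds log_step * gradient_accumulation scaled
            # losses, i.e. sum(loss_i) / gradient_accumulation. Dividing by
            # log_step gives sum(loss_i) / (gradient_accumulation * log_step),
            # the mean raw per-batch loss, so no extra factor of
            # gradient_accumulation is needed.
            print('step {}: loss {:.4f}'.format(
                (step + 1) // gradient_accumulation,
                running_loss / log_step))
            running_loss = 0

Under these assumptions, running_loss accumulates losses that were already divided by gradient_accumulation, and logging fires once per log_step optimizer steps, so running_loss / log_step is already the mean raw loss; the extra * gradient_accumulation factor would have inflated the logged value, which is presumably why this commit removes it from both training scripts.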