Skip to content

Commit

Permalink
Report the total number of tokens per second in the log (src+tgt) (#993)
Browse files · Browse the repository at this point in the history
  • Loading branch information
guillaumekln authored Jan 9, 2023
1 parent 802bbfc commit 5e8b851
Showing 1 changed file with 8 additions and 5 deletions.
13 changes: 8 additions & 5 deletions opennmt/training.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -597,24 +597,27 @@ def log(self, is_master=True):
summary["steps_per_sec"],
description="Training steps per second",
)
steps_per_sec_fmt = "steps/s = %0.2f" % summary["steps_per_sec"]

words_per_sec_fmt = []
for name, avg in summary["words_per_sec"].items():
tf.summary.scalar(
"words_per_sec/%s" % name,
avg,
description="%s words per second" % name.capitalize(),
)
words_per_sec_fmt.append("%s words/s = %d" % (name, avg))

tf.get_logger().info(
"Step = %d ; %s ; Learning rate = %f ; Loss = %f",
"Step = %d ; steps/s = %0.2f, tokens/s = %d (%s) ; Learning rate = %f ; Loss = %f",
summary["step"],
", ".join([steps_per_sec_fmt] + list(sorted(words_per_sec_fmt))),
summary["steps_per_sec"],
sum(summary["words_per_sec"].values()),
", ".join(
"%d %s" % (avg, name)
for name, avg in sorted(summary["words_per_sec"].items())
),
summary["learning_rate"],
summary["loss"],
)

tf.summary.scalar("loss", summary["loss"], description="Training loss")
tf.summary.scalar(
"optim/learning_rate", summary["learning_rate"], description="Learning rate"
Expand Down

0 comments on commit 5e8b851

Please sign in to comment.