Thanks for visiting codestin.com. Credit goes to github.com.
We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 0f47d12 · commit a8e6462 — Copy full SHA for a8e6462
pytorch/utils.py
@@ -90,7 +90,7 @@ def do_one_epoch(self, dl, epoch):
90
metrics = {"train/train_loss": loss,
91
"train/epoch": (step + 1 + (self.n_steps_per_epoch * epoch)) / self.n_steps_per_epoch,
92
"train/example_ct": self.example_ct,
93
- "seq_per_sec":len(batch["labels"])/(tf-ti),
+ "sqe_per_sec":len(batch["labels"])/(tf-ti),
94
"seq_per_sec_dl":len(batch["labels"])/tf_with_dataloader,}
95
if step + 1 < self.n_steps_per_epoch:
96
# 🐝 Log train metrics to wandb
0 commit comments