diff --git a/chapters/en/chapter7/6.mdx b/chapters/en/chapter7/6.mdx
index 7a498a863..6c6418c75 100644
--- a/chapters/en/chapter7/6.mdx
+++ b/chapters/en/chapter7/6.mdx
@@ -870,7 +870,6 @@ for epoch in range(num_train_epochs):
         if step % 100 == 0:
             accelerator.print(
                 {
-                    "lr": get_lr(),
                     "samples": step * samples_per_step,
                     "steps": completed_steps,
                     "loss/train": loss.item() * gradient_accumulation_steps,
@@ -912,4 +911,4 @@ And that's it -- you now have your own custom training loop for causal language



-{/if}
\ No newline at end of file
+{/if}