
Commit b384303

fix: comment from logile to logfile

1 parent f1e2ace

2 files changed: +2 -2 lines changed

train_gpt2.py

Lines changed: 1 addition & 1 deletion

@@ -840,7 +840,7 @@ def get_lr(it):
 # the 0th iteration is often an outlier (much slower) => skip logging it
 tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0)
 print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | lr {lr:.2e} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)")
-# log to logile
+# log to logfile
 if master_process and logfile is not None:
     with open(logfile, "a") as f:
         f.write("s:%d trl:%f\n" % (step, lossf))

train_llama3.py

Lines changed: 1 addition & 1 deletion

@@ -1235,7 +1235,7 @@ def get_lr(it):
 # the 0th iteration is often an outlier (much slower) => skip logging it
 tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0)
 print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | lr {lr:.2e} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)")
-# log to logile
+# log to logfile
 if master_process and logfile is not None:
     with open(logfile, "a") as f:
         f.write("s:%d trl:%f\n" % (step, lossf))
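For context (not part of this commit): both files append training-loss records in the "s:%d trl:%f" format shown above. A minimal sketch of how such a logfile could be read back for plotting, assuming it contains only lines of that form; the path "train.log" and the helper name are hypothetical:

# Sketch only: parse "s:<step> trl:<loss>" lines written by the logging code above.
def read_train_loss(path="train.log"):
    steps, losses = [], []
    with open(path) as f:
        for line in f:
            if line.startswith("s:"):
                step_part, loss_part = line.split()   # e.g. "s:12 trl:3.141590"
                steps.append(int(step_part.split(":")[1]))
                losses.append(float(loss_part.split(":")[1]))
    return steps, losses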
