use orig_model in sampling, silly of me to miss this

Author: Andrej Karpathy
Date:   2025-10-20 00:05:09 +00:00
Parent: 2bc521a6de
Commit: c1d2ed1c13

@@ -219,7 +219,7 @@ for step in range(num_iterations + 1):
         "My favorite color is",
         "If 5*x + 3 = 13, then x is",
     ]
-    engine = Engine(model, tokenizer)
+    engine = Engine(orig_model, tokenizer) # use orig_model to avoid recompilation
     for prompt in prompts:
         tokens = tokenizer(prompt, prepend="<|bos|>")
         with autocast_ctx:
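
For context on the added comment, a minimal sketch of the pattern this fix relies on; the toy model, shapes, and surrounding details below are illustrative assumptions, not the actual training script. The idea is to keep a handle to the raw module before wrapping it with torch.compile, train through the compiled model, and run sampling through the uncompiled orig_model, since generation feeds shapes that change from call to call and can force the compiled graph to recompile.

import torch
import torch.nn as nn

# Minimal sketch (toy model, assumed setup): keep a reference to the raw
# module before compiling; model and orig_model share the same parameters.
orig_model = nn.Linear(16, 16)
model = torch.compile(orig_model)  # compiled wrapper used for training steps

# Training sees fixed-shape batches, so the compiled graph is built once and reused.
batch = torch.randn(32, 16)
loss = model(batch).sum()
loss.backward()

# Sampling feeds shapes that vary from call to call (different prompt lengths,
# one new token at a time), which can trigger repeated recompilation if it goes
# through the compiled model. Running generation through orig_model avoids that.
with torch.no_grad():
    for prompt_len in (3, 7, 11):
        _ = orig_model(torch.randn(prompt_len, 16))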