use orig_model in sampling, silly of me to miss this

Andrej Karpathy 2025-10-20 00:04:15 +00:00
parent 9467d83cf2
commit 2bc521a6de


@@ -219,7 +219,7 @@ for step in range(num_iterations + 1):
         "My favorite color is",
         "If 5*x + 3 = 13, then x is",
     ]
-    engine = Engine(model, tokenizer)
+    engine = Engine(orig_model, tokenizer)
     for prompt in prompts:
         tokens = tokenizer(prompt, prepend="<|bos|>")
         with autocast_ctx:
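
For context, a minimal sketch of why the sampling Engine would want orig_model rather than model, assuming the usual setup where the raw module is kept aside before it is wrapped for training (the names and toy module below, other than Engine/orig_model from the diff, are assumptions, not code from the repo):

import torch
import torch.nn as nn

# Stand-in for the real GPT module; kept as `orig_model` before wrapping.
orig_model = nn.Linear(8, 8)
# Training runs through the compiled wrapper.
model = torch.compile(orig_model)

# ... training loop uses `model` ...

# Sampling/inference (like the Engine call in the diff) uses the underlying
# nn.Module, so variable-length, KV-cached decoding doesn't go through a
# graph specialized for fixed training shapes.
with torch.no_grad():
    out = orig_model(torch.randn(1, 8))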