From bf19cb325cfdbd2b42ee1d9f8cd2b11e38b9afb5 Mon Sep 17 00:00:00 2001
From: Sushrut Karnik
Date: Fri, 13 Mar 2026 00:07:06 +0100
Subject: [PATCH] turn tokenizer train back on

---
 runs/runcpu.sh | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/runs/runcpu.sh b/runs/runcpu.sh
index 74f4e93..b40b0bb 100755
--- a/runs/runcpu.sh
+++ b/runs/runcpu.sh
@@ -31,10 +31,9 @@ if [ -z "$WANDB_RUN" ]; then
 fi
 
 # train tokenizer on ~2B characters (~34 seconds on my MacBook Pro M3 Max)
-# python -m nanochat.dataset -n 8
-# python -m scripts.tok_train --max-chars=2000000000
+python -m nanochat.dataset -n 8
+python -m scripts.tok_train --max-chars=2000000000
 python -m scripts.tok_eval
-# Target directory: /Users/sushrutkarnik_1/.cache/nanochat/base_data_climbmix
 
 # train a small 4 layer model
 # I tuned this run to complete in about 30 minutes on my MacBook Pro M3 Max.