From a1fd43b34dd67d25e4a78a88b3ecdaac985c0335 Mon Sep 17 00:00:00 2001
From: Sermet Pekin <96650846+SermetPekin@users.noreply.github.com>
Date: Thu, 6 Nov 2025 10:24:04 +0300
Subject: [PATCH] Fix formatting of wandb_run initialization

---
 scripts/base_train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/base_train.py b/scripts/base_train.py
index 0b1fa90..bae837f 100644
--- a/scripts/base_train.py
+++ b/scripts/base_train.py
@@ -74,7 +74,7 @@ synchronize = torch.cuda.synchronize if device_type == "cuda" else lambda: None
 get_max_memory = torch.cuda.max_memory_allocated if device_type == "cuda" else lambda: 0
 
 # wandb logging init
-wandb_run = get_wandb("nanochat",run=run, master_process=master_process, user_config=user_config)
+wandb_run = get_wandb("nanochat", run=run, master_process=master_process, user_config=user_config)
 
 # Tokenizer will be useful for evaluation, also we need the vocab size
 tokenizer = get_tokenizer()
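
For context: the change is purely cosmetic, adding a space after the first argument of the get_wandb(...) call. Below is a minimal sketch of what a helper with that signature might look like, assuming it wraps wandb.init and returns None on non-master ranks; the body is an illustration, not nanochat's actual implementation.

import wandb

def get_wandb(project, run=None, master_process=True, user_config=None):
    # Assumed behavior (see lead-in): only the master process (rank 0)
    # creates a wandb run; other ranks receive None and skip logging.
    if not master_process:
        return None
    return wandb.init(
        project=project,      # e.g. "nanochat", as in the patched call
        name=run,             # run name passed through by the caller
        config=user_config,   # config dict recorded with the run
    )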