From 11e68bf4427aef8748a8c0c3978c9c03838a9466 Mon Sep 17 00:00:00 2001
From: Sam Abrahams
Date: Mon, 17 Nov 2025 11:32:56 -0500
Subject: [PATCH] Fix comment: rotary embeddings final dimension size

---
 nanochat/gpt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nanochat/gpt.py b/nanochat/gpt.py
index 8b220c3..216343c 100644
--- a/nanochat/gpt.py
+++ b/nanochat/gpt.py
@@ -244,7 +244,7 @@ class GPT(nn.Module):
     def forward(self, idx, targets=None, kv_cache=None, loss_reduction='mean'):
         B, T = idx.size()
 
-        # Grab the rotary embeddings for the current sequence length (they are of shape (1, seq_len, 1, head_dim))
+        # Grab the rotary embeddings for the current sequence length (they are of shape (1, seq_len, 1, head_dim/2))
         assert T <= self.cos.size(1), f"Sequence length grew beyond the rotary embeddings cache: {T} > {self.cos.size(1)}"
         assert idx.device == self.cos.device, f"Rotary embeddings and idx are on different devices: {idx.device} != {self.cos.device}"
         assert self.cos.dtype == torch.bfloat16, "Rotary embeddings must be in bfloat16"
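
Note: for context on why the corrected comment ends in head_dim/2, below is a minimal
sketch of how a rotary cos/sin cache of shape (1, seq_len, 1, head_dim/2) is typically
precomputed. This is illustrative only, not nanochat's actual code; the function name
precompute_rotary and the base=10000.0 default are assumptions for the example. Rotary
embeddings rotate channel pairs, so only head_dim/2 distinct frequencies are cached.

    import torch

    def precompute_rotary(seq_len, head_dim, base=10000.0, device="cpu"):
        # One frequency per channel pair -> head_dim/2 frequencies.
        inv_freq = 1.0 / (base ** (torch.arange(0, head_dim, 2, device=device).float() / head_dim))  # (head_dim/2,)
        t = torch.arange(seq_len, device=device).float()   # (seq_len,)
        freqs = torch.outer(t, inv_freq)                    # (seq_len, head_dim/2)
        cos = freqs.cos()[None, :, None, :]                 # (1, seq_len, 1, head_dim/2)
        sin = freqs.sin()[None, :, None, :]                 # (1, seq_len, 1, head_dim/2)
        return cos, sin

    cos, sin = precompute_rotary(seq_len=8, head_dim=64)
    print(cos.shape)  # torch.Size([1, 8, 1, 32]) -- last dim is head_dim/2, matching the corrected comment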