Compare commits

...

5 Commits

Author  SHA1  Message  Date

adamnemecek  9fdda24de1  Merge f88153065f into 4a87a0d19f  2025-11-19 11:42:12 -05:00
Andrej  4a87a0d19f  Merge pull request #299 from samjabrahams/rotary_embedding_head_dim_comment_cleanup: "Fix comment: rotary embeddings final dimension size"  2025-11-17 13:29:21 -08:00
Sam Abrahams  11e68bf442  Fix comment: rotary embeddings final dimension size  2025-11-17 11:32:56 -05:00
adamnemecek  f88153065f  use a Delta enum  2025-10-14 16:36:34 -07:00
adamnemecek  a13c9ca6ae  use Ordering::then_with  2025-10-14 16:23:33 -07:00
2 changed files with 19 additions and 14 deletions

View File

@@ -244,7 +244,7 @@ class GPT(nn.Module):
 
     def forward(self, idx, targets=None, kv_cache=None, loss_reduction='mean'):
         B, T = idx.size()
-        # Grab the rotary embeddings for the current sequence length (they are of shape (1, seq_len, 1, head_dim))
+        # Grab the rotary embeddings for the current sequence length (they are of shape (1, seq_len, 1, head_dim/2))
         assert T <= self.cos.size(1), f"Sequence length grew beyond the rotary embeddings cache: {T} > {self.cos.size(1)}"
         assert idx.device == self.cos.device, f"Rotary embeddings and idx are on different devices: {idx.device} != {self.cos.device}"
         assert self.cos.dtype == torch.bfloat16, "Rotary embeddings must be in bfloat16"
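
The comment fix above reflects how RoPE caches are shaped: rotary embeddings rotate the head channels in pairs, so the cached cos/sin tables need only head_dim/2 angles per position, not head_dim. A minimal sketch of that standard construction (a hypothetical helper, not the repo's PyTorch code):

fn rotary_cache(seq_len: usize, head_dim: usize, base: f32) -> (Vec<Vec<f32>>, Vec<Vec<f32>>) {
    let half = head_dim / 2; // one rotation angle per channel pair, hence head_dim/2
    let inv_freq: Vec<f32> = (0..half)
        .map(|i| 1.0 / base.powf(2.0 * i as f32 / head_dim as f32))
        .collect();
    let mut cos = vec![vec![0.0f32; half]; seq_len];
    let mut sin = vec![vec![0.0f32; half]; seq_len];
    for t in 0..seq_len {
        for (j, f) in inv_freq.iter().enumerate() {
            let angle = t as f32 * f;
            cos[t][j] = angle.cos();
            sin[t][j] = angle.sin();
        }
    }
    (cos, sin) // conceptually broadcast to (1, seq_len, 1, head_dim/2) in the model
}

fn main() {
    let (cos, _sin) = rotary_cache(8, 64, 10000.0);
    assert_eq!(cos[0].len(), 32); // the final dimension is head_dim/2, as the fixed comment says
}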

View File

@@ -27,6 +27,13 @@ pub struct Tokenizer {
 
 // ------------------------ internal helpers ------------------------
 
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[repr(i8)]
+enum Delta {
+    Rem = -1,
+    Ins = 1,
+}
+
 #[derive(Clone, Debug)]
 struct Word {
     ids: Vec<u32>,
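
For reference, the #[repr(i8)] pins the enum's discriminants, so a cast recovers the -1/+1 values the raw i32 code used, while the derived traits keep the variants comparable and hashable. A small standalone sketch of that behavior (not from the diff):

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(i8)]
enum Delta {
    Rem = -1, // a pair occurrence disappeared
    Ins = 1,  // a pair occurrence was created
}

fn main() {
    assert_eq!(Delta::Rem as i32, -1); // the pinned discriminant survives the cast
    assert_eq!(Delta::Ins as i32, 1);
    assert!(Delta::Ins == Delta::Ins); // derived Eq replaces sign checks like `delta > 0`
}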
@@ -48,7 +55,7 @@ impl Word {
     /// -1 for removed pairs, +1 for newly created pairs.
     ///
     /// NOTE: this version deliberately avoids a HashMap in the hot loop.
-    fn merge_pair(&mut self, pair: Pair, new_id: u32) -> Vec<(Pair, i32)> {
+    fn merge_pair(&mut self, pair: Pair, new_id: u32) -> Vec<(Pair, Delta)> {
         let (a, b) = pair;
         let n = self.ids.len();
         if n < 2 {
@@ -56,7 +63,7 @@ impl Word {
         }
         let mut out: Vec<u32> = Vec::with_capacity(n);
-        let mut deltas: Vec<(Pair, i32)> = Vec::with_capacity(6);
+        let mut deltas: Vec<(Pair, Delta)> = Vec::with_capacity(6);
         let mut i = 0;
         while i < n {
@@ -66,13 +73,13 @@ impl Word {
                 // remove old pairs
                 if let Some(x) = left {
-                    deltas.push(((x, a), -1));
-                    deltas.push(((x, new_id), 1));
+                    deltas.push(((x, a), Delta::Rem));
+                    deltas.push(((x, new_id), Delta::Ins));
                 }
-                deltas.push(((a, b), -1));
+                deltas.push(((a, b), Delta::Rem));
                 if let Some(y) = right {
-                    deltas.push(((b, y), -1));
-                    deltas.push(((new_id, y), 1));
+                    deltas.push(((b, y), Delta::Rem));
+                    deltas.push(((new_id, y), Delta::Ins));
                 }
                 // write merged token
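
To make the delta bookkeeping concrete: merging pair (a, b) inside a word removes the pairs that straddled the merge site and creates new ones around the merged token. A self-contained sketch, simplified from the diff's merge_pair (the word [1, 2, 3, 4] and token id 9 are illustrative):

type Pair = (u32, u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[repr(i8)]
enum Delta { Rem = -1, Ins = 1 }

fn merge_once(ids: &[u32], (a, b): Pair, new_id: u32) -> (Vec<u32>, Vec<(Pair, Delta)>) {
    let mut out = Vec::with_capacity(ids.len());
    let mut deltas = Vec::new();
    let mut i = 0;
    while i < ids.len() {
        if i + 1 < ids.len() && ids[i] == a && ids[i + 1] == b {
            let left = out.last().copied();       // already-merged left neighbor
            let right = ids.get(i + 2).copied();  // unprocessed right neighbor
            if let Some(x) = left {
                deltas.push(((x, a), Delta::Rem));
                deltas.push(((x, new_id), Delta::Ins));
            }
            deltas.push(((a, b), Delta::Rem));
            if let Some(y) = right {
                deltas.push(((b, y), Delta::Rem));
                deltas.push(((new_id, y), Delta::Ins));
            }
            out.push(new_id); // write the merged token and skip both halves
            i += 2;
        } else {
            out.push(ids[i]);
            i += 1;
        }
    }
    (out, deltas)
}

fn main() {
    let (merged, deltas) = merge_once(&[1, 2, 3, 4], (2, 3), 9);
    assert_eq!(merged, vec![1, 9, 4]);
    // deltas: (1,2) and (3,4) removed, (2,3) consumed, (1,9) and (9,4) created
    println!("{:?}", deltas);
}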
@@ -112,12 +119,10 @@ impl PartialOrd for MergeJob {
 
 impl Ord for MergeJob {
     fn cmp(&self, other: &Self) -> Ordering {
         // Max-heap by count; tie-break to ascending pair order (deterministic)
-        if self.count != other.count {
-            self.count.cmp(&other.count)
-        } else {
+        self.count.cmp(&other.count).then_with(||
             // ascending order on the pair when counts tie
             other.pair.cmp(&self.pair)
-        }
+        )
    }
 }
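
The rewritten cmp is behavior-preserving: Ordering::then_with only evaluates the tie-break closure when the counts compare equal, and reversing the pair comparison inside a max-heap yields ascending pair order on ties. A quick standalone check, assuming a minimal MergeJob shape:

use std::cmp::Ordering;
use std::collections::BinaryHeap;

type Pair = (u32, u32);

#[derive(PartialEq, Eq)]
struct MergeJob { count: u64, pair: Pair }

impl PartialOrd for MergeJob {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
}
impl Ord for MergeJob {
    fn cmp(&self, other: &Self) -> Ordering {
        // max by count; reversed pair comparison => ascending pair order on ties
        self.count.cmp(&other.count).then_with(|| other.pair.cmp(&self.pair))
    }
}

fn main() {
    let mut heap = BinaryHeap::new();
    heap.push(MergeJob { count: 5, pair: (7, 8) });
    heap.push(MergeJob { count: 5, pair: (1, 2) }); // same count, smaller pair
    heap.push(MergeJob { count: 9, pair: (3, 4) });
    let order: Vec<Pair> = std::iter::from_fn(|| heap.pop().map(|j| j.pair)).collect();
    assert_eq!(order, vec![(3, 4), (1, 2), (7, 8)]); // highest count first, then ascending pair
}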
@@ -217,10 +222,10 @@ impl Tokenizer {
             let changes = words[word_idx].merge_pair(top.pair, new_id);
             // Update global pair counts based on this word's count
             for (pair, delta) in changes {
-                let delta_total = delta * counts[word_idx];
+                let delta_total = (delta as i32) * counts[word_idx];
                 if delta_total != 0 {
                     *pair_counts.entry(pair).or_default() += delta_total;
-                    if delta > 0 {
+                    if delta == Delta::Ins {
                         local_pos_updates.entry(pair).or_default().insert(word_idx);
                     }
                 }
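
The cast at this use site is the only place the enum meets arithmetic: each Delta is widened to i32 and scaled by the word's corpus count before touching the global pair counts, and only Delta::Ins entries trigger position updates. A standalone sketch of that update loop (hypothetical counts, simplified types):

use std::collections::HashMap;

type Pair = (u32, u32);

#[derive(Clone, Copy, PartialEq, Eq)]
#[repr(i8)]
enum Delta { Rem = -1, Ins = 1 }

fn main() {
    let mut pair_counts: HashMap<Pair, i32> = HashMap::from([((1, 2), 4), ((2, 3), 4)]);
    let word_count = 4; // this word occurs 4 times in the corpus
    let changes = [((1, 2), Delta::Rem), ((1, 9), Delta::Ins), ((2, 3), Delta::Rem)];
    let mut refreshed: Vec<Pair> = Vec::new();
    for (pair, delta) in changes {
        let delta_total = (delta as i32) * word_count; // the cast recovers -1 / +1
        if delta_total != 0 {
            *pair_counts.entry(pair).or_default() += delta_total;
            if delta == Delta::Ins {
                refreshed.push(pair); // only newly created pairs need their word index refreshed
            }
        }
    }
    assert_eq!(pair_counts[&(2, 3)], 0); // all 4 occurrences consumed by the merge
    assert_eq!(pair_counts[&(1, 9)], 4); // new pair appears once per word occurrence
    assert_eq!(refreshed, vec![(1, 9)]);
}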