diff --git a/runs/miniseries.sh b/runs/miniseries.sh
index 01c4459..074c87e 100644
--- a/runs/miniseries.sh
+++ b/runs/miniseries.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 # See speedrun.sh for more comments
 # Usage: ./miniseries.sh [series_name]
 
diff --git a/runs/runcpu.sh b/runs/runcpu.sh
index 853fa1f..277e7dc 100755
--- a/runs/runcpu.sh
+++ b/runs/runcpu.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 # Showing an example run for exercising some of the code paths on the CPU (or MPS on Macbooks)
 # This script was last updated/tuned on Jan 17, 2026.
 
diff --git a/runs/scaling_laws.sh b/runs/scaling_laws.sh
index f1e2fd4..6958ddd 100644
--- a/runs/scaling_laws.sh
+++ b/runs/scaling_laws.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 LABEL="jan26"
 
diff --git a/runs/speedrun.sh b/runs/speedrun.sh
index fa50694..bafd5ec 100644
--- a/runs/speedrun.sh
+++ b/runs/speedrun.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 # This script is configured to train your own GPT-2 grade LLM (pretraining + finetuning)
 # It is designed to run on a blank 8XH100 GPU node and takes approximately 3 hours to complete.