diff --git a/pytorch-ddp-example/run.sbatch b/pytorch-ddp-example/run.sbatch
index 19c49e3a7e6bbbe389311a2795ad8b2b7c880942..7997600b53da4c2ae4a2fb4c1c9603fc20e53551 100644
--- a/pytorch-ddp-example/run.sbatch
+++ b/pytorch-ddp-example/run.sbatch
@@ -30,7 +30,7 @@ export MASTER_PORT=54123
 
 # Prevent NCCL not figuring out how to initialize.
 export NCCL_SOCKET_IFNAME=ib0
-# Prevent GLOO not being able to communicate.
+# Prevent Gloo from being unable to communicate.
 export GLOO_SOCKET_IFNAME=ib0
 
 srun env -u CUDA_VISIBLE_DEVICES python -u -m torchrun_jsc \
diff --git a/pytorch-fsdp-example/run.sbatch b/pytorch-fsdp-example/run.sbatch
index 2e310246b54090c6ae107f3915ff57c3948534b0..1f5941fc5e7438bcd25cd4a8313bfe1f8ecf2dd6 100644
--- a/pytorch-fsdp-example/run.sbatch
+++ b/pytorch-fsdp-example/run.sbatch
@@ -30,7 +30,7 @@ export MASTER_PORT=54123
 
 # Prevent NCCL not figuring out how to initialize.
 export NCCL_SOCKET_IFNAME=ib0
-# Prevent GLOO not being able to communicate.
+# Prevent Gloo from being unable to communicate.
 export GLOO_SOCKET_IFNAME=ib0
 
 # For the example, we get a bunch of information from the